• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

JuliaLang / julia / #37753

19 Apr 2024 04:05AM UTC coverage: 86.721% (-0.7%) from 87.412%
#37753

push

local

web-flow
refactor `abstract_eval_statement_expr` (#54111)

This commit divides the various expr handlings into `abstract_eval_xxx`
subroutines in order to make the logic clearer and easier to follow.

167 of 186 new or added lines in 2 files covered. (89.78%)

632 existing lines in 36 files now uncovered.

75606 of 87183 relevant lines covered (86.72%)

14585289.37 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

38.48
/base/reinterpretarray.jl
1
# This file is a part of Julia. License is MIT: https://julialang.org/license
2

3
"""
Gives a reinterpreted view (of element type T) of the underlying array (of element type S).
If the size of `T` differs from the size of `S`, the array will be compressed/expanded in
the first dimension. The variant `reinterpret(reshape, T, a)` instead adds or consumes the first dimension
depending on the ratio of element sizes.
"""
struct ReinterpretArray{T,N,S,A<:AbstractArray{S},IsReshaped} <: AbstractArray{T, N}
    parent::A
    # Cached results of `array_subpadding` in each direction; rechecked on
    # access in `check_readable`/`check_writable` so inference can fold them.
    readable::Bool
    writable::Bool

    # Out-of-line error throwers keep the constructors' fast paths small.
    function throwbits(S::Type, T::Type, U::Type)
        @noinline
        throw(ArgumentError("cannot reinterpret `$(S)` as `$(T)`, type `$(U)` is not a bits type"))
    end
    function throwsize0(S::Type, T::Type, msg)
        @noinline
        throw(ArgumentError("cannot reinterpret a zero-dimensional `$(S)` array to `$(T)` which is of a $msg size"))
    end
    function throwsingleton(S::Type, T::Type)
        @noinline
        throw(ArgumentError("cannot reinterpret a `$(S)` array to `$(T)` which is a singleton type"))
    end

    # `reinterpret` is declared global so the constructors below extend the
    # module-level function while retaining access to `new`.
    global reinterpret

    @doc """
        reinterpret(T::DataType, A::AbstractArray)

    Construct a view of the array with the same binary data as the given
    array, but with `T` as element type.

    This function also works on "lazy" array whose elements are not computed until they are explicitly retrieved.
    For instance, `reinterpret` on the range `1:6` works similarly as on the dense vector `collect(1:6)`:

    ```jldoctest
    julia> reinterpret(Float32, UInt32[1 2 3 4 5])
    1×5 reinterpret(Float32, ::Matrix{UInt32}):
     1.0f-45  3.0f-45  4.0f-45  6.0f-45  7.0f-45

    julia> reinterpret(Complex{Int}, 1:6)
    3-element reinterpret(Complex{$Int}, ::UnitRange{$Int}):
     1 + 2im
     3 + 4im
     5 + 6im
    ```

    If the location of padding bits does not line up between `T` and `eltype(A)`, the resulting array will be
    read-only or write-only, to prevent invalid bits from being written to or read from, respectively.

    ```jldoctest
    julia> a = reinterpret(Tuple{UInt8, UInt32}, UInt32[1, 2])
    1-element reinterpret(Tuple{UInt8, UInt32}, ::Vector{UInt32}):
     (0x01, 0x00000002)

    julia> a[1] = 3
    ERROR: Padding of type Tuple{UInt8, UInt32} is not compatible with type UInt32.

    julia> b = reinterpret(UInt32, Tuple{UInt8, UInt32}[(0x01, 0x00000002)]); # showing will error

    julia> b[1]
    ERROR: Padding of type UInt32 is not compatible with type Tuple{UInt8, UInt32}.
    ```
    """
    function reinterpret(::Type{T}, a::A) where {T,N,S,A<:AbstractArray{S, N}}
        function thrownonint(S::Type, T::Type, dim)
            @noinline
            throw(ArgumentError("""
                cannot reinterpret an `$(S)` array to `$(T)` whose first dimension has size `$(dim)`.
                The resulting array would have non-integral first dimension.
                """))
        end
        function throwaxes1(S::Type, T::Type, ax1)
            @noinline
            throw(ArgumentError("cannot reinterpret a `$(S)` array to `$(T)` when the first axis is $ax1. Try reshaping first."))
        end
        # Both element types must be bits types for a raw reinterpretation.
        isbitstype(T) || throwbits(S, T, T)
        isbitstype(S) || throwbits(S, T, S)
        (N != 0 || sizeof(T) == sizeof(S)) || throwsize0(S, T, "different")
        if N != 0 && sizeof(S) != sizeof(T)
            # Differing element sizes rescale the first dimension, which must
            # therefore divide evenly and be 1-based.
            ax1 = axes(a)[1]
            dim = length(ax1)
            if issingletontype(T)
                issingletontype(S) || throwsingleton(S, T)
            else
                rem(dim*sizeof(S),sizeof(T)) == 0 || thrownonint(S, T, dim)
            end
            first(ax1) == 1 || throwaxes1(S, T, ax1)
        end
        readable = array_subpadding(T, S)
        writable = array_subpadding(S, T)
        new{T, N, S, A, false}(a, readable, writable)
    end
    # Same-eltype reinterpretation is the identity.
    reinterpret(::Type{T}, a::AbstractArray{T}) where {T} = a

    # With reshaping
    function reinterpret(::typeof(reshape), ::Type{T}, a::A) where {T,S,A<:AbstractArray{S}}
        function throwintmult(S::Type, T::Type)
            @noinline
            throw(ArgumentError("`reinterpret(reshape, T, a)` requires that one of `sizeof(T)` (got $(sizeof(T))) and `sizeof(eltype(a))` (got $(sizeof(S))) be an integer multiple of the other"))
        end
        function throwsize1(a::AbstractArray, T::Type)
            @noinline
            throw(ArgumentError("`reinterpret(reshape, $T, a)` where `eltype(a)` is $(eltype(a)) requires that `axes(a, 1)` (got $(axes(a, 1))) be equal to 1:$(sizeof(T) ÷ sizeof(eltype(a))) (from the ratio of element sizes)"))
        end
        function throwfromsingleton(S, T)
            @noinline
            throw(ArgumentError("`reinterpret(reshape, $T, a)` where `eltype(a)` is $S requires that $T be a singleton type, since $S is one"))
        end
        isbitstype(T) || throwbits(S, T, T)
        isbitstype(S) || throwbits(S, T, S)
        # The dimensionality of the view depends on the element-size ratio:
        # equal sizes keep it, larger S adds a leading "channel" dimension,
        # larger T consumes the parent's first dimension.
        if sizeof(S) == sizeof(T)
            N = ndims(a)
        elseif sizeof(S) > sizeof(T)
            issingletontype(T) && throwsingleton(S, T)
            rem(sizeof(S), sizeof(T)) == 0 || throwintmult(S, T)
            N = ndims(a) + 1
        else
            issingletontype(S) && throwfromsingleton(S, T)
            rem(sizeof(T), sizeof(S)) == 0 || throwintmult(S, T)
            N = ndims(a) - 1
            N > -1 || throwsize0(S, T, "larger")
            axes(a, 1) == OneTo(sizeof(T) ÷ sizeof(S)) || throwsize1(a, T)
        end
        readable = array_subpadding(T, S)
        writable = array_subpadding(S, T)
        new{T, N, S, A, true}(a, readable, writable)
    end
    reinterpret(::typeof(reshape), ::Type{T}, a::AbstractArray{T}) where {T} = a
end
133

134
# Convenient aliases for the two flavors of `ReinterpretArray`, distinguished
# by the `IsReshaped` type parameter.
ReshapedReinterpretArray{T,N,S,A<:AbstractArray{S}} = ReinterpretArray{T,N,S,A,true}
NonReshapedReinterpretArray{T,N,S,A<:AbstractArray{S, N}} = ReinterpretArray{T,N,S,A,false}
136

137
"""
    reinterpret(reshape, T, A::AbstractArray{S}) -> B

Change the type-interpretation of `A` while consuming or adding a "channel dimension."

If `sizeof(T) = n*sizeof(S)` for `n>1`, `A`'s first dimension must be
of size `n` and `B` lacks `A`'s first dimension. Conversely, if `sizeof(S) = n*sizeof(T)` for `n>1`,
`B` gets a new first dimension of size `n`. The dimensionality is unchanged if `sizeof(T) == sizeof(S)`.

!!! compat "Julia 1.6"
    This method requires at least Julia 1.6.

# Examples

```jldoctest
julia> A = [1 2; 3 4]
2×2 Matrix{$Int}:
 1  2
 3  4

julia> reinterpret(reshape, Complex{Int}, A)    # the result is a vector
2-element reinterpret(reshape, Complex{$Int}, ::Matrix{$Int}) with eltype Complex{$Int}:
 1 + 3im
 2 + 4im

julia> a = [(1,2,3), (4,5,6)]
2-element Vector{Tuple{$Int, $Int, $Int}}:
 (1, 2, 3)
 (4, 5, 6)

julia> reinterpret(reshape, Int, a)             # the result is a matrix
3×2 reinterpret(reshape, $Int, ::Vector{Tuple{$Int, $Int, $Int}}) with eltype $Int:
 1  4
 2  5
 3  6
```
"""
reinterpret(::typeof(reshape), T::Type, a::AbstractArray)

# Reinterpreting a reinterpreted view collapses to a single view of the
# original parent.
reinterpret(::Type{T}, a::NonReshapedReinterpretArray) where {T} = reinterpret(T, a.parent)
reinterpret(::typeof(reshape), ::Type{T}, a::ReshapedReinterpretArray) where {T} = reinterpret(reshape, T, a.parent)
×
178

179
# Definition of StridedArray: the union of array types that expose a strided
# memory layout (dense arrays plus strided views/reshapes/reinterpretations).
StridedFastContiguousSubArray{T,N,A<:DenseArray} = FastContiguousSubArray{T,N,A}
StridedReinterpretArray{T,N,A<:Union{DenseArray,StridedFastContiguousSubArray},IsReshaped} = ReinterpretArray{T,N,S,A,IsReshaped} where S
StridedReshapedArray{T,N,A<:Union{DenseArray,StridedFastContiguousSubArray,StridedReinterpretArray}} = ReshapedArray{T,N,A}
StridedSubArray{T,N,A<:Union{DenseArray,StridedReshapedArray,StridedReinterpretArray},
    I<:Tuple{Vararg{Union{RangeIndex, ReshapedUnitRange, AbstractCartesianIndex}}}} = SubArray{T,N,A,I}
StridedArray{T,N} = Union{DenseArray{T,N}, StridedSubArray{T,N}, StridedReshapedArray{T,N}, StridedReinterpretArray{T,N}}
StridedVector{T} = StridedArray{T,1}
StridedMatrix{T} = StridedArray{T,2}
StridedVecOrMat{T} = Union{StridedVector{T}, StridedMatrix{T}}

# Dense-layout arrays derive their strides directly from their sizes.
strides(a::Union{DenseArray,StridedReshapedArray,StridedReinterpretArray}) = size_to_strides(1, size(a)...)
function stride(A::Union{DenseArray,StridedReshapedArray,StridedReinterpretArray}, k::Integer)
    # Strides past the last dimension conventionally equal the total length.
    return k ≤ ndims(A) ? strides(A)[k] : length(A)
end
193

194
# Strides of a (possibly non-contiguous) reinterpreted view, derived from the
# parent's strides and the ratio of element sizes.
function strides(a::ReinterpretArray{T,<:Any,S,<:AbstractArray{S},IsReshaped}) where {T,S,IsReshaped}
    # Contiguous layouts can use the generic dense-stride computation.
    _checkcontiguous(Bool, a) && return size_to_strides(1, size(a)...)
    ps = strides(parent(a))
    szT, szS = sizeof(T), sizeof(S)
    # Equal element sizes share the parent's strides (covers 0-dim parents too).
    szT == szS && return ps
    IsReshaped && szT < szS && return (1, _checked_strides(ps, szT, szS)...)
    ps[1] == 1 || throw(ArgumentError("Parent must be contiguous in the 1st dimension!"))
    rest = _checked_strides(tail(ps), szT, szS)
    return IsReshaped ? rest : (1, rest...)
end
204

205
# Rescale parent strides (in units of `elp`-byte elements) into strides in
# units of `els`-byte elements, throwing unless every stride divides exactly.
@inline function _checked_strides(stp::Tuple, els::Integer, elp::Integer)
    # Fast path: parent elements are an exact multiple of the new element
    # size, so every stride scales by the same integer factor.
    if elp > els && rem(elp, els) == 0
        factor = div(elp, els)
        return map(st -> factor * st, stp)
    end
    # General path: each stride must individually divide without remainder.
    quotients = map(st -> divrem(elp * st, els), stp)
    all(qr -> iszero(qr[2]), quotients) ||
        throw(ArgumentError("Parent's strides could not be exactly divided!"))
    map(first, quotients)
end
215

216
# A reinterpreted view is contiguous exactly when its parent is.
_checkcontiguous(::Type{Bool}, A::ReinterpretArray) = _checkcontiguous(Bool, parent(A))

# Allocation for a reinterpreted view defers to the parent array.
similar(a::ReinterpretArray, T::Type, d::Dims) = similar(a.parent, T, d)
249,323✔
219

220
function check_readable(a::ReinterpretArray{T, N, S} where N) where {T,S}
    # See the comment in check_writable for why the cached flag is
    # re-derived from the types when it is false.
    if !a.readable && !array_subpadding(T, S)
        throw(PaddingError(T, S))
    end
end

function check_writable(a::ReinterpretArray{T, N, S} where N) where {T,S}
    # `array_subpadding` is relatively expensive (compared to a simple arrayref),
    # so it is cached in the array. However, it is computable at compile time if
    # inference has the types available. By using this form of the check, we can
    # get the best of both worlds for the success case. If the types were not
    # available to inference, we simply need to check the field (relatively cheap)
    # and if they were we should be able to fold this check away entirely.
    if !a.writable && !array_subpadding(S, T)
        throw(PaddingError(T, S))
    end
end
238

239
## IndexStyle specializations

# For `reinterpret(reshape, T, a)` where we're adding a channel dimension and with
# `IndexStyle(a) == IndexLinear()`, it's advantageous to retain pseudo-linear indexing.
struct IndexSCartesian2{K} <: IndexStyle end   # K = sizeof(S) ÷ sizeof(T), a static-sized 2d cartesian iterator
1,799✔
244

245
# Non-reshaping views simply inherit the parent's index style.
IndexStyle(::Type{ReinterpretArray{T,N,S,A,false}}) where {T,N,S,A<:AbstractArray{S,N}} = IndexStyle(A)
function IndexStyle(::Type{ReinterpretArray{T,N,S,A,true}}) where {T,N,S,A<:AbstractArray{S}}
    if sizeof(T) < sizeof(S)
        # Channel-adding reshape over a linear parent keeps pseudo-linear indexing.
        IndexStyle(A) === IndexLinear() && return IndexSCartesian2{sizeof(S) ÷ sizeof(T)}()
        return IndexCartesian()
    end
    return IndexStyle(A)
end
# Two matching pseudo-linear styles combine to themselves.
IndexStyle(::IndexSCartesian2{K}, ::IndexSCartesian2{K}) where {K} = IndexSCartesian2{K}()
×
254

255
struct SCartesianIndex2{K}   # can't make <:AbstractCartesianIndex without N, and 2 would be a bit misleading
    i::Int
    j::Int
end
# An SCartesianIndex2 is already a valid index; pass it through unchanged.
to_index(i::SCartesianIndex2) = i

# Lazy K×length(indices2) matrix of SCartesianIndex2 values, used as the
# eachindex iterator for channel-adding reshaped reinterpret views.
struct SCartesianIndices2{K,R<:AbstractUnitRange{Int}} <: AbstractMatrix{SCartesianIndex2{K}}
    indices2::R
end
SCartesianIndices2{K}(indices2::AbstractUnitRange{Int}) where {K} = (@assert K::Int > 1; SCartesianIndices2{K,typeof(indices2)}(indices2))
1✔
265

UNCOV
266
eachindex(::IndexSCartesian2{K}, A::ReshapedReinterpretArray) where {K} = SCartesianIndices2{K}(eachindex(IndexLinear(), parent(A)))
@inline function eachindex(style::IndexSCartesian2{K}, A::AbstractArray, B::AbstractArray...) where {K}
    idxs = eachindex(style, A)
    # All arrays must agree on the iteration space.
    _all_match_first(X -> eachindex(style, X), idxs, B...) ||
        throw_eachindex_mismatch_indices(IndexSCartesian2{K}(), axes(A), axes.(B)...)
    return idxs
end
272

UNCOV
273
# Shape of the index iterator: K channel indices by the parent's linear range.
size(it::SCartesianIndices2{K}) where K = (K, length(it.indices2))
axes(it::SCartesianIndices2{K}) where K = (OneTo(K), it.indices2)

first(it::SCartesianIndices2{K}) where {K} = SCartesianIndex2{K}(1, first(it.indices2))
last(it::SCartesianIndices2{K}) where {K}  = SCartesianIndex2{K}(K, last(it.indices2))
×
278

279
@inline function getindex(it::SCartesianIndices2{K}, i::Int, j::Int) where {K}
    @boundscheck checkbounds(it, i, j)
    return SCartesianIndex2{K}(i, it.indices2[j])
end
283

284
# Iteration runs the channel index (first field) fastest, then advances the
# outer range.
function iterate(it::SCartesianIndices2{K}) where {K}
    next = iterate(it.indices2)
    next === nothing && return nothing
    outer, ostate = next
    return SCartesianIndex2{K}(1, outer), (1, outer, ostate)
end

function iterate(it::SCartesianIndices2{K}, (chan, outer, ostate)) where {K}
    if chan < K
        # Advance within the current channel block.
        nextchan = chan + 1
        return SCartesianIndex2{K}(nextchan, outer), (nextchan, outer, ostate)
    end
    next = iterate(it.indices2, ostate)
    next === nothing && return nothing
    outer, ostate = next
    return SCartesianIndex2{K}(1, outer), (1, outer, ostate)
end
301

302
# Hooks so `@simd` can split the iteration into an outer range and an inner
# channel loop of static length K.
SimdLoop.simd_outer_range(it::SCartesianIndices2) = it.indices2
SimdLoop.simd_inner_length(::SCartesianIndices2{K}, ::Any) where K = K
@inline function SimdLoop.simd_index(::SCartesianIndices2{K}, Ilast::Int, I1::Int) where {K}
    SCartesianIndex2{K}(I1+1, Ilast)
end
307

308
# Pseudo-linear indexing needs no reshaping of the destination array.
_maybe_reshape(::IndexSCartesian2, A::ReshapedReinterpretArray, I...) = A
×
309

310
# fallbacks
function _getindex(::IndexSCartesian2, A::AbstractArray{T,N}, I::Vararg{Int, N}) where {T,N}
    @_propagate_inbounds_meta
    getindex(A, I...)
end
function _setindex!(::IndexSCartesian2, A::AbstractArray{T,N}, v, I::Vararg{Int, N}) where {T,N}
    @_propagate_inbounds_meta
    setindex!(A, v, I...)
end
319
# fallbacks for array types that use "pass-through" indexing (e.g., `IndexStyle(A) = IndexStyle(parent(A))`)
# but which don't handle SCartesianIndex2
function _getindex(::IndexSCartesian2, A::AbstractArray{T,N}, ind::SCartesianIndex2) where {T,N}
    @_propagate_inbounds_meta
    # Expand the pseudo-linear `j` into the trailing cartesian subscripts.
    J = _ind2sub(tail(axes(A)), ind.j)
    getindex(A, ind.i, J...)
end
function _setindex!(::IndexSCartesian2, A::AbstractArray{T,N}, v, ind::SCartesianIndex2) where {T,N}
    @_propagate_inbounds_meta
    J = _ind2sub(tail(axes(A)), ind.j)
    setindex!(A, v, ind.i, J...)
end
eachindex(style::IndexSCartesian2, A::AbstractArray) = eachindex(style, parent(A))
×
332

333
## AbstractArray interface

parent(a::ReinterpretArray) = a.parent
# Aliasing identity is determined entirely by the parent's storage.
dataids(a::ReinterpretArray) = dataids(a.parent)
unaliascopy(a::NonReshapedReinterpretArray{T}) where {T} = reinterpret(T, unaliascopy(a.parent))
unaliascopy(a::ReshapedReinterpretArray{T}) where {T} = reinterpret(reshape, T, unaliascopy(a.parent))
×
339

340
function size(a::NonReshapedReinterpretArray{T,N,S} where {N}) where {T,S}
    psz = size(a.parent)
    # The first dimension is rescaled by the ratio of element sizes.
    dim1 = issingletontype(T) ? psz[1] : div(psz[1]*sizeof(S), sizeof(T))
    tuple(dim1, tail(psz)...)
end
function size(a::ReshapedReinterpretArray{T,N,S} where {N}) where {T,S}
    psz = size(a.parent)
    # A leading channel dimension is added or consumed per the size ratio.
    sizeof(S) > sizeof(T) && return (div(sizeof(S), sizeof(T)), psz...)
    sizeof(S) < sizeof(T) && return tail(psz)
    return psz
end
size(a::NonReshapedReinterpretArray{T,0}) where {T} = ()
1✔
352

353
function axes(a::NonReshapedReinterpretArray{T,N,S} where {N}) where {T,S}
    paxs = axes(a.parent)
    lo, len = first(paxs[1]), length(paxs[1])
    # Rescale the first axis length by the element-size ratio, keeping offset.
    n1 = issingletontype(T) ? len : div(len*sizeof(S), sizeof(T))
    tuple(oftype(paxs[1], lo:lo+n1-1), tail(paxs)...)
end
function axes(a::ReshapedReinterpretArray{T,N,S} where {N}) where {T,S}
    paxs = axes(a.parent)
    sizeof(S) > sizeof(T) && return (OneTo(div(sizeof(S), sizeof(T))), paxs...)
    sizeof(S) < sizeof(T) && return tail(paxs)
    return paxs
end
axes(a::NonReshapedReinterpretArray{T,0}) where {T} = ()
×
366

367
has_offset_axes(a::ReinterpretArray) = has_offset_axes(a.parent)

elsize(::Type{<:ReinterpretArray{T}}) where {T} = sizeof(T)
# C conversion reuses the parent's buffer; the pointer is retyped at use sites.
cconvert(::Type{Ptr{T}}, a::ReinterpretArray{T,N,S} where N) where {T,S} = cconvert(Ptr{S}, a.parent)
80,416,441✔
371

UNCOV
372
@propagate_inbounds function getindex(a::NonReshapedReinterpretArray{T,0,S}) where {T,S}
    if isprimitivetype(T) && isprimitivetype(S)
        # Primitive-to-primitive can go through scalar reinterpret.
        reinterpret(T, a.parent[])
    else
        a[firstindex(a)]
    end
end
379

380
# Whether indexing should go through a raw pointer into the underlying
# buffer; views recurse to their ultimate storage.
check_ptr_indexable(a::ReinterpretArray, sz = elsize(a)) = check_ptr_indexable(parent(a), sz)
check_ptr_indexable(a::ReshapedArray, sz) = check_ptr_indexable(parent(a), sz)
check_ptr_indexable(a::FastContiguousSubArray, sz) = check_ptr_indexable(parent(a), sz)
check_ptr_indexable(a::Array, sz) = sizeof(eltype(a)) !== sz
check_ptr_indexable(a::Memory, sz) = true
check_ptr_indexable(a::AbstractArray, sz) = false
×
386

387
@propagate_inbounds getindex(a::ReinterpretArray) = a[firstindex(a)]

@propagate_inbounds isassigned(a::ReinterpretArray, inds::Integer...) = checkbounds(Bool, a, inds...) && (check_ptr_indexable(a) || _isassigned_ra(a, inds...))
@propagate_inbounds isassigned(a::ReinterpretArray, inds::SCartesianIndex2) = isassigned(a.parent, inds.j)
@propagate_inbounds _isassigned_ra(a::ReinterpretArray, inds...) = true # that is not entirely true, but computing exactly which indexes will be accessed in the parent requires a lot of duplication from the _getindex_ra code
×
392

393
@propagate_inbounds function getindex(a::ReinterpretArray{T,N,S}, inds::Vararg{Int, N}) where {T,N,S}
    check_readable(a)
    # Pointer-based access when the buffer allows it, otherwise the generic
    # element-reassembly path.
    check_ptr_indexable(a) && return _getindex_ptr(a, inds...)
    _getindex_ra(a, inds[1], tail(inds))
end
398

399
@propagate_inbounds function getindex(a::ReinterpretArray{T,N,S}, i::Int) where {T,N,S}
    check_readable(a)
    check_ptr_indexable(a) && return _getindex_ptr(a, i)
    if isa(IndexStyle(a), IndexLinear)
        return _getindex_ra(a, i, ())
    end
    # Convert to full indices here, to avoid needing multiple conversions in
    # the loop in _getindex_ra
    inds = _to_subscript_indices(a, i)
    isempty(inds) ? _getindex_ra(a, 1, ()) : _getindex_ra(a, inds[1], tail(inds))
end
410

UNCOV
411
@propagate_inbounds function getindex(a::ReshapedReinterpretArray{T,N,S}, ind::SCartesianIndex2) where {T,N,S}
    check_readable(a)
    # Load the parent element into a box and pick out channel `ind.i`.
    s = Ref{S}(a.parent[ind.j])
    tptr = Ptr{T}(unsafe_convert(Ref{S}, s))
    GC.@preserve s return unsafe_load(tptr, ind.i)
end
417

418
@inline function _getindex_ptr(a::ReinterpretArray{T}, inds...) where {T}
    @boundscheck checkbounds(a, inds...)
    li = _to_linear_index(a, inds...)
    # Read directly from the parent's buffer at the retyped offset.
    ap = cconvert(Ptr{T}, a)
    p = unsafe_convert(Ptr{T}, ap) + sizeof(T) * (li - 1)
    GC.@preserve ap return unsafe_load(p)
end
425

426
# Reassemble one `T` element of a non-reshaped view from the parent's `S`
# elements, without pointer access to the parent's buffer.
@propagate_inbounds function _getindex_ra(a::NonReshapedReinterpretArray{T,N,S}, i1::Int, tailinds::TT) where {T,N,S,TT}
    # Make sure to match the scalar reinterpret if that is applicable
    if sizeof(T) == sizeof(S) && (fieldcount(T) + fieldcount(S)) == 0
        if issingletontype(T) # singleton types
            @boundscheck checkbounds(a, i1, tailinds...)
            return T.instance
        end
        return reinterpret(T, a.parent[i1, tailinds...])
    else
        @boundscheck checkbounds(a, i1, tailinds...)
        # Byte offset of the requested element, split into a parent element
        # index and a byte offset within that element.
        ind_start, sidx = divrem((i1-1)*sizeof(T), sizeof(S))
        # Optimizations that avoid branches
        if sizeof(T) % sizeof(S) == 0
            # T is bigger than S and contains an integer number of them
            n = sizeof(T) ÷ sizeof(S)
            t = Ref{T}()
            GC.@preserve t begin
                sptr = Ptr{S}(unsafe_convert(Ref{T}, t))
                for i = 1:n
                     s = a.parent[ind_start + i, tailinds...]
                     unsafe_store!(sptr, s, i)
                end
            end
            return t[]
        elseif sizeof(S) % sizeof(T) == 0
            # S is bigger than T and contains an integer number of them
            s = Ref{S}(a.parent[ind_start + 1, tailinds...])
            GC.@preserve s begin
                tptr = Ptr{T}(unsafe_convert(Ref{S}, s))
                return unsafe_load(tptr + sidx)
            end
        else
            i = 1
            nbytes_copied = 0
            # This is a bit complicated to deal with partial elements
            # at both the start and the end. LLVM will fold as appropriate,
            # once it knows the data layout
            s = Ref{S}()
            t = Ref{T}()
            GC.@preserve s t begin
                sptr = Ptr{S}(unsafe_convert(Ref{S}, s))
                tptr = Ptr{T}(unsafe_convert(Ref{T}, t))
                while nbytes_copied < sizeof(T)
                    s[] = a.parent[ind_start + i, tailinds...]
                    nb = min(sizeof(S) - sidx, sizeof(T)-nbytes_copied)
                    memcpy(tptr + nbytes_copied, sptr + sidx, nb)
                    nbytes_copied += nb
                    sidx = 0
                    i += 1
                end
            end
            return t[]
        end
    end
end
481

482
# Reassemble one `T` element of a reshaped view; `i1` is the channel index
# when a leading dimension was added.
@propagate_inbounds function _getindex_ra(a::ReshapedReinterpretArray{T,N,S}, i1::Int, tailinds::TT) where {T,N,S,TT}
    # Make sure to match the scalar reinterpret if that is applicable
    if sizeof(T) == sizeof(S) && (fieldcount(T) + fieldcount(S)) == 0
        if issingletontype(T) # singleton types
            @boundscheck checkbounds(a, i1, tailinds...)
            return T.instance
        end
        return reinterpret(T, a.parent[i1, tailinds...])
    end
    @boundscheck checkbounds(a, i1, tailinds...)
    if sizeof(T) >= sizeof(S)
        t = Ref{T}()
        GC.@preserve t begin
            sptr = Ptr{S}(unsafe_convert(Ref{T}, t))
            if sizeof(T) > sizeof(S)
                # Extra dimension in the parent array
                n = sizeof(T) ÷ sizeof(S)
                if isempty(tailinds) && IndexStyle(a.parent) === IndexLinear()
                    offset = n * (i1 - firstindex(a))
                    for i = 1:n
                        s = a.parent[i + offset]
                        unsafe_store!(sptr, s, i)
                    end
                else
                    for i = 1:n
                        s = a.parent[i, i1, tailinds...]
                        unsafe_store!(sptr, s, i)
                    end
                end
            else
                # No extra dimension
                s = a.parent[i1, tailinds...]
                unsafe_store!(sptr, s)
            end
        end
        return t[]
    end
    # S is bigger than T and contains an integer number of them
    # n = sizeof(S) ÷ sizeof(T)
    s = Ref{S}()
    GC.@preserve s begin
        tptr = Ptr{T}(unsafe_convert(Ref{S}, s))
        s[] = a.parent[tailinds...]
        return unsafe_load(tptr, i1)
    end
end
528

UNCOV
529
@propagate_inbounds function setindex!(a::NonReshapedReinterpretArray{T,0,S}, v) where {T,S}
    if isprimitivetype(S) && isprimitivetype(T)
        # Primitive-to-primitive can go through scalar reinterpret.
        a.parent[] = reinterpret(S, v)
        return a
    end
    setindex!(a, v, firstindex(a))
end

@propagate_inbounds setindex!(a::ReinterpretArray, v) = setindex!(a, v, firstindex(a))
×
538

539
@propagate_inbounds function setindex!(a::ReinterpretArray{T,N,S}, v, inds::Vararg{Int, N}) where {T,N,S}
    check_writable(a)
    # Pointer-based store when possible; otherwise the generic splitting path.
    check_ptr_indexable(a) && return _setindex_ptr!(a, v, inds...)
    _setindex_ra!(a, v, inds[1], tail(inds))
end
544

UNCOV
545
@propagate_inbounds function setindex!(a::ReinterpretArray{T,N,S}, v, i::Int) where {T,N,S}
    check_writable(a)
    check_ptr_indexable(a) && return _setindex_ptr!(a, v, i)
    if isa(IndexStyle(a), IndexLinear)
        return _setindex_ra!(a, v, i, ())
    end
    # Convert once to full subscripts, mirroring the getindex path.
    inds = _to_subscript_indices(a, i)
    _setindex_ra!(a, v, inds[1], tail(inds))
end
554

UNCOV
555
@propagate_inbounds function setindex!(a::ReshapedReinterpretArray{T,N,S}, v, ind::SCartesianIndex2) where {T,N,S}
    check_writable(a)
    v = convert(T, v)::T
    # Read-modify-write the parent element: overwrite channel `ind.i` in a box
    # and store the whole `S` back.
    s = Ref{S}(a.parent[ind.j])
    GC.@preserve s begin
        tptr = Ptr{T}(unsafe_convert(Ref{S}, s))
        unsafe_store!(tptr, v, ind.i)
    end
    a.parent[ind.j] = s[]
    return a
end
566

567
@inline function _setindex_ptr!(a::ReinterpretArray{T}, v, inds...) where {T}
    @boundscheck checkbounds(a, inds...)
    li = _to_linear_index(a, inds...)
    # Write directly into the parent's buffer at the retyped offset.
    ap = cconvert(Ptr{T}, a)
    p = unsafe_convert(Ptr{T}, ap) + sizeof(T) * (li - 1)
    GC.@preserve ap unsafe_store!(p, v)
    return a
end
575

576
# Store one `T` element into a non-reshaped view by splitting its bytes over
# the parent's `S` elements, without pointer access to the parent's buffer.
@propagate_inbounds function _setindex_ra!(a::NonReshapedReinterpretArray{T,N,S}, v, i1::Int, tailinds::TT) where {T,N,S,TT}
    v = convert(T, v)::T
    # Make sure to match the scalar reinterpret if that is applicable
    if sizeof(T) == sizeof(S) && (fieldcount(T) + fieldcount(S)) == 0
        if issingletontype(T) # singleton types
            @boundscheck checkbounds(a, i1, tailinds...)
            # setindex! is a noop except for the index check
        else
            setindex!(a.parent, reinterpret(S, v), i1, tailinds...)
        end
    else
        @boundscheck checkbounds(a, i1, tailinds...)
        # Byte offset of the destination, split into a parent element index
        # and a byte offset within that element.
        ind_start, sidx = divrem((i1-1)*sizeof(T), sizeof(S))
        # Optimizations that avoid branches
        if sizeof(T) % sizeof(S) == 0
            # T is bigger than S and contains an integer number of them
            t = Ref{T}(v)
            GC.@preserve t begin
                sptr = Ptr{S}(unsafe_convert(Ref{T}, t))
                n = sizeof(T) ÷ sizeof(S)
                for i = 1:n
                    s = unsafe_load(sptr, i)
                    a.parent[ind_start + i, tailinds...] = s
                end
            end
        elseif sizeof(S) % sizeof(T) == 0
            # S is bigger than T and contains an integer number of them
            s = Ref{S}(a.parent[ind_start + 1, tailinds...])
            GC.@preserve s begin
                tptr = Ptr{T}(unsafe_convert(Ref{S}, s))
                unsafe_store!(tptr + sidx, v)
                a.parent[ind_start + 1, tailinds...] = s[]
            end
        else
            t = Ref{T}(v)
            s = Ref{S}()
            GC.@preserve t s begin
                tptr = Ptr{UInt8}(unsafe_convert(Ref{T}, t))
                sptr = Ptr{UInt8}(unsafe_convert(Ref{S}, s))
                nbytes_copied = 0
                i = 1
                # Deal with any partial elements at the start. We'll have to copy in the
                # element from the original array and overwrite the relevant parts
                if sidx != 0
                    s[] = a.parent[ind_start + i, tailinds...]
                    nb = min((sizeof(S) - sidx) % UInt, sizeof(T) % UInt)
                    memcpy(sptr + sidx, tptr, nb)
                    nbytes_copied += nb
                    a.parent[ind_start + i, tailinds...] = s[]
                    i += 1
                    sidx = 0
                end
                # Deal with the main body of elements
                while nbytes_copied < sizeof(T) && (sizeof(T) - nbytes_copied) > sizeof(S)
                    nb = min(sizeof(S), sizeof(T) - nbytes_copied)
                    memcpy(sptr, tptr + nbytes_copied, nb)
                    nbytes_copied += nb
                    a.parent[ind_start + i, tailinds...] = s[]
                    i += 1
                end
                # Deal with trailing partial elements
                if nbytes_copied < sizeof(T)
                    s[] = a.parent[ind_start + i, tailinds...]
                    nb = min(sizeof(S), sizeof(T) - nbytes_copied)
                    memcpy(sptr, tptr + nbytes_copied, nb)
                    a.parent[ind_start + i, tailinds...] = s[]
                end
            end
        end
    end
    return a
end
648

UNCOV
649
# Store `v` at index (i1, tailinds...) of a reshaped reinterpreted view.
# `T` is the element type exposed by the view, `S` the element type of the
# parent array; the size ratio between them is absorbed into (or taken from)
# the leading dimension of the parent.
@propagate_inbounds function _setindex_ra!(a::ReshapedReinterpretArray{T,N,S}, v, i1::Int, tailinds::TT) where {T,N,S,TT}
    v = convert(T, v)::T
    # Make sure to match the scalar reinterpret if that is applicable
    if sizeof(T) == sizeof(S) && (fieldcount(T) + fieldcount(S)) == 0
        if issingletontype(T) # singleton types
            @boundscheck checkbounds(a, i1, tailinds...)
            # setindex! is a noop except for the index check
        else
            setindex!(a.parent, reinterpret(S, v), i1, tailinds...)
        end
        # The element is fully handled above; without this early return we
        # would fall through and redundantly store the value a second time
        # through the pointer-based path below.
        return a
    end
    @boundscheck checkbounds(a, i1, tailinds...)
    if sizeof(T) >= sizeof(S)
        t = Ref{T}(v)
        GC.@preserve t begin
            sptr = Ptr{S}(unsafe_convert(Ref{T}, t))
            if sizeof(T) > sizeof(S)
                # Extra dimension in the parent array: one T spans n
                # consecutive S entries along the leading parent dimension.
                n = sizeof(T) ÷ sizeof(S)
                if isempty(tailinds) && IndexStyle(a.parent) === IndexLinear()
                    offset = n * (i1 - firstindex(a))
                    for i = 1:n
                        s = unsafe_load(sptr, i)
                        a.parent[i + offset] = s
                    end
                else
                    for i = 1:n
                        s = unsafe_load(sptr, i)
                        a.parent[i, i1, tailinds...] = s
                    end
                end
            else # sizeof(T) == sizeof(S)
                # No extra dimension
                s = unsafe_load(sptr)
                a.parent[i1, tailinds...] = s
            end
        end
    else
        # S is bigger than T and contains an integer number of them:
        # read-modify-write the parent element that holds this T slot.
        s = Ref{S}()
        GC.@preserve s begin
            tptr = Ptr{T}(unsafe_convert(Ref{S}, s))
            s[] = a.parent[tailinds...]
            unsafe_store!(tptr, v, i1)
            a.parent[tailinds...] = s[]
        end
    end
    return a
end
698

699
# Padding
700
"""
    Padding(offset, size)

One run of padding bytes in a type's layout: `offset` is the 0-indexed byte
offset of the next valid byte (an offset of `sizeof(T)` denotes trailing
padding), and `size` is the number of padding bytes preceding that valid byte.
"""
struct Padding
    offset::Int # 0-indexed offset of the next valid byte; sizeof(T) indicates trailing padding
    size::Int   # bytes of padding before a valid byte
end
UNCOV
704
# Overlap of two `Padding` spans; the resulting size is clamped at 0 when
# the spans are disjoint.
function intersect(p1::Padding, p2::Padding)
    lo = max(p1.offset, p2.offset)
    hi = min(p1.offset + p1.size, p2.offset + p2.size)
    return Padding(lo, max(0, hi - lo))
end
709

710
"""
    PaddingError(S, T)

Thrown when the padding layout of type `S` is not compatible with that of
type `T` (see `showerror` for the rendered message).
"""
struct PaddingError <: Exception
    S::Type
    T::Type
end
714

715
# Render a PaddingError as a human-readable message.
showerror(io::IO, err::PaddingError) =
    print(io, "Padding of type $(err.S) is not compatible with type $(err.T).")
718

719
"""
720
    CyclePadding(padding, total_size)
721

722
Cycles an iterator of `Padding` structs, restarting the padding at `total_size`.
723
E.g. if `padding` is all the padding in a struct and `total_size` is the total
724
aligned size of that array, `CyclePadding` will correspond to the padding in an
725
infinite vector of such structs.
726
"""
727
struct CyclePadding{P}
728
    padding::P
51✔
729
    total_size::Int
730
end
731
# Iteration traits for CyclePadding: an infinite (unless the wrapped padding
# iterator is empty) stream of `Padding` entries.
eltype(::Type{<:CyclePadding}) = Padding
IteratorSize(::Type{<:CyclePadding}) = IsInfinite()
isempty(cp::CyclePadding) = isempty(cp.padding)
×
734
# Start cycling: delegate to the wrapped padding iterator, tagging its state
# with a byte offset of 0 for the first period.
function iterate(cp::CyclePadding)
    inner = iterate(cp.padding)
    inner === nothing && return nothing
    pad, rest = inner
    return pad, (0, rest)
end
UNCOV
739
# Continue cycling: `state` is `(byte_offset, inner_state...)`. When the
# wrapped iterator is exhausted, restart it shifted by one `total_size`
# period (a 1-tuple state means "restart the inner iterator").
function iterate(cp::CyclePadding, state::Tuple)
    base = state[1]
    inner = iterate(cp.padding, tail(state)...)
    inner === nothing && return iterate(cp, (base + cp.total_size,))
    pad = inner[1]
    return Padding(pad.offset + base, pad.size), (base, tail(inner)...)
end
744

745
"""
746
    Compute the location of padding in an isbits datatype. Recursive over the fields of that type.
747
"""
748
@assume_effects :foldable function padding(T::DataType, baseoffset::Int = 0)
    pads = Padding[]
    # Byte position just past the last field processed so far.
    prev_end::Int = baseoffset
    for field = 1:fieldcount(T)
        fieldstart = baseoffset + Int(fieldoffset(T, field))
        fT = fieldtype(T, field)
        # Padding nested inside the field itself, then any gap before it.
        append!(pads, padding(fT, fieldstart))
        if fieldstart != prev_end
            push!(pads, Padding(fieldstart, fieldstart - prev_end))
        end
        prev_end = fieldstart + sizeof(fT)
    end
    # Trailing padding after the last field, if any.
    if 0 < prev_end - baseoffset < sizeof(T)
        push!(pads, Padding(baseoffset + sizeof(T), sizeof(T) - prev_end + baseoffset))
    end
    return Core.svec(pads...)
end
765

UNCOV
766
# Build the infinite padding cycle for element type `T`: the per-element
# padding from `padding(T)`, plus any tail padding needed to reach the
# aligned element size, cycling with that aligned size as the period.
function CyclePadding(T::DataType)
    align = datatype_alignment(T)
    sz = sizeof(T)
    aligned_sz = sz + (align - (sz % align)) % align
    pads = padding(T)
    if sz != aligned_sz
        pads = Core.svec(pads..., Padding(sz, aligned_sz - sz))
    end
    return CyclePadding(pads, aligned_sz)
end
775

UNCOV
776
# Decide whether arrays of `S` can back a reinterpreted view of `T` as far as
# padding is concerned: every padding range of `T`, cycled over one
# lcm(sizeof(S), sizeof(T)) period, must lie entirely inside a padding range
# of `S`. Marked :total so the result folds to a constant per type pair.
@assume_effects :total function array_subpadding(S, T)
    lcm_size = lcm(sizeof(S), sizeof(T))
    s, t = CyclePadding(S), CyclePadding(T)
    checked_size = 0
    # use of Stateful harms inference and makes this vulnerable to invalidation
    (pad, tstate) = let
        it = iterate(t)
        it === nothing && return true   # T has no padding: nothing to check
        it
    end
    (ps, sstate) = let
        it = iterate(s)
        it === nothing && return false  # T has padding but S has none
        it
    end
    while checked_size < lcm_size
        while true
            # See if there's corresponding padding in S
            ps.offset > pad.offset && return false
            intersect(ps, pad) == pad && break
            # a nonempty CyclePadding is infinite, so this never exhausts
            ps, sstate = iterate(s, sstate)
        end
        checked_size = pad.offset + pad.size
        pad, tstate = iterate(t, tstate)
    end
    return true
end
803

804
# Two types can be byte-copied one-to-one only when their padding bytes
# occupy identical positions.
@assume_effects :foldable struct_subpadding(::Type{Out}, ::Type{In}) where {Out, In} =
    padding(Out) == padding(In)
807

UNCOV
808
# Number of meaningful (non-padding) bytes in `T`'s layout.
@assume_effects :foldable function packedsize(::Type{T}) where T
    total_padding = sum((p.size for p in padding(T)), init = 0)
    return sizeof(T) - total_padding
end
812

813
# A type is "packed" when its layout contains no padding bytes at all.
@assume_effects :foldable function ispacked(::Type{T}) where T
    return isempty(padding(T))
end
×
814

UNCOV
815
# Copy a (possibly padded) value of type `In` at `ptr_in` into a densely
# packed byte layout at `ptr_out`, recursing into padded nested fields so
# that only meaningful (non-padding) bytes are written to the output.
# The caller must keep both referenced objects GC-preserved.
function _copytopacked!(ptr_out::Ptr{Out}, ptr_in::Ptr{In}) where {Out, In}
    writeoffset = 0
    for i ∈ 1:fieldcount(In)
        readoffset = fieldoffset(In, i)
        fT = fieldtype(In, i)
        if ispacked(fT)
            # field has no internal padding: raw byte copy
            readsize = sizeof(fT)
            memcpy(ptr_out + writeoffset, ptr_in + readoffset, readsize)
            writeoffset += readsize
        else # nested padded type
            _copytopacked!(ptr_out + writeoffset, Ptr{fT}(ptr_in + readoffset))
            writeoffset += packedsize(fT)
        end
    end
end
830

UNCOV
831
# Inverse of `_copytopacked!`: scatter a densely packed byte sequence at
# `ptr_in` into the (possibly padded) field layout of `Out` at `ptr_out`,
# recursing into padded nested fields. Padding bytes of `Out` are left
# untouched. The caller must keep both referenced objects GC-preserved.
function _copyfrompacked!(ptr_out::Ptr{Out}, ptr_in::Ptr{In}) where {Out, In}
    readoffset = 0
    for i ∈ 1:fieldcount(Out)
        writeoffset = fieldoffset(Out, i)
        fT = fieldtype(Out, i)
        if ispacked(fT)
            # field has no internal padding: raw byte copy
            writesize = sizeof(fT)
            memcpy(ptr_out + writeoffset, ptr_in + readoffset, writesize)
            readoffset += writesize
        else # nested padded type
            _copyfrompacked!(Ptr{fT}(ptr_out + writeoffset), ptr_in + readoffset)
            readoffset += packedsize(fT)
        end
    end
end
846

847
# Scalar reinterpret between isbits types whose padding layouts may differ.
# Requires the two types to carry the same number of meaningful bytes; the
# data is then moved either by a raw byte copy (identical padding) or
# field-by-field via the packed-copy helpers (mismatched padding).
@inline function _reinterpret(::Type{Out}, x::In) where {Out, In}
    # handle non-primitive types
    isbitstype(Out) || throw(ArgumentError("Target type for `reinterpret` must be isbits"))
    isbitstype(In) || throw(ArgumentError("Source type for `reinterpret` must be isbits"))
    # the number of meaningful (non-padding) bytes must agree
    inpackedsize = packedsize(In)
    outpackedsize = packedsize(Out)
    inpackedsize == outpackedsize ||
        throw(ArgumentError("Packed sizes of types $Out and $In do not match; got $outpackedsize \
            and $inpackedsize, respectively."))
    in = Ref{In}(x)
    out = Ref{Out}()
    if struct_subpadding(Out, In)
        # if packed the same, just copy
        GC.@preserve in out begin
            ptr_in = unsafe_convert(Ptr{In}, in)
            ptr_out = unsafe_convert(Ptr{Out}, out)
            memcpy(ptr_out, ptr_in, sizeof(Out))
        end
        return out[]
    else
        # mismatched padding
        GC.@preserve in out begin
            ptr_in = unsafe_convert(Ptr{In}, in)
            ptr_out = unsafe_convert(Ptr{Out}, out)

            if fieldcount(In) > 0 && ispacked(Out)
                # gather In's data bytes directly into the packed Out
                _copytopacked!(ptr_out, ptr_in)
            elseif fieldcount(Out) > 0 && ispacked(In)
                # scatter the packed In directly into Out's fields
                _copyfrompacked!(ptr_out, ptr_in)
            else
                # both sides padded: go through an intermediate packed buffer
                packed = Ref{NTuple{inpackedsize, UInt8}}()
                GC.@preserve packed begin
                    ptr_packed = unsafe_convert(Ptr{NTuple{inpackedsize, UInt8}}, packed)
                    _copytopacked!(ptr_packed, ptr_in)
                    _copyfrompacked!(ptr_out, ptr_packed)
                end
            end
        end
        return out[]
    end
end
888

889

890
# Reductions with IndexSCartesian2
891

UNCOV
892
# Reduction driver for containers indexed with the two-part SCartesianIndex2
# scheme (K inner rows per logical column). Empty column ranges reduce like
# any empty iterator; otherwise defer to the blocked implementation.
function _mapreduce(f::F, op::OP, style::IndexSCartesian2{K}, A::AbstractArrayOrBroadcasted) where {F,OP,K}
    inds = eachindex(style, A)
    ncols = size(inds)[2]
    ncols == 0 && return mapreduce_empty_iter(f, op, A, IteratorEltype(A))
    return mapreduce_impl(f, op, A, first(inds), last(inds))
end
901

UNCOV
902
# Blocked mapreduce over a column range of SCartesianIndex2 indices.
# `ifirst`/`ilast` delimit the range; `K` is the fixed extent of the inner
# index. Ranges narrower than `blksize` columns are reduced sequentially;
# wider ranges are halved and combined with `op` (pairwise recursion).
# NOTE(review): the sequential seed reads indices 1 and 2 of the first
# column, so this assumes `ifirst.i == 1` — the recursive splits below
# always pass SCI(1, ...); confirm external callers do the same.
@noinline function mapreduce_impl(f::F, op::OP, A::AbstractArrayOrBroadcasted,
                                  ifirst::SCI, ilast::SCI, blksize::Int) where {F,OP,SCI<:SCartesianIndex2{K}} where K
    if ilast.j - ifirst.j < blksize
        # sequential portion
        @inbounds a1 = A[ifirst]
        @inbounds a2 = A[SCI(2,ifirst.j)]
        v = op(f(a1), f(a2))
        # rest of the first column
        @simd for i = ifirst.i + 2 : K
            @inbounds ai = A[SCI(i,ifirst.j)]
            v = op(v, f(ai))
        end
        # Remaining columns
        for j = ifirst.j+1 : ilast.j
            @simd for i = 1:K
                @inbounds ai = A[SCI(i,j)]
                v = op(v, f(ai))
            end
        end
        return v
    else
        # pairwise portion: split the column range in half and combine
        jmid = ifirst.j + (ilast.j - ifirst.j) >> 1
        v1 = mapreduce_impl(f, op, A, ifirst, SCI(K,jmid), blksize)
        v2 = mapreduce_impl(f, op, A, SCI(1,jmid+1), ilast, blksize)
        return op(v1, v2)
    end
end
929

UNCOV
930
# Entry point without an explicit block size: delegate with the default
# pairwise block size for `f`/`op`.
function mapreduce_impl(f::F, op::OP, A::AbstractArrayOrBroadcasted,
                        ifirst::SCartesianIndex2, ilast::SCartesianIndex2) where {F,OP}
    return mapreduce_impl(f, op, A, ifirst, ilast, pairwise_blocksize(f, op))
end
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc