
realm / realm-core / build 1711

28 Sep 2023 09:16PM UTC coverage: 91.2% (-0.02%) from 91.216%
Build 1711 · push · Evergreen · web-flow
Commit: remove unnecessary upload/download waiters in tests (#6992)

95848 of 175738 branches covered (54.5%)
232470 of 254901 relevant lines covered (91.2%)
6754237.13 hits per line

Source File: /src/realm/bplustree.cpp (75.72% covered)
/*************************************************************************
 *
 * Copyright 2018 Realm Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 **************************************************************************/

#include <realm/bplustree.hpp>
#include <realm/impl/destroy_guard.hpp>
#include <realm/array_unsigned.hpp>

using namespace realm;

namespace realm {

bool BPlusTreeNode::get_context_flag() const noexcept
{
    auto ref = get_ref();
    MemRef mem(ref, m_tree->get_alloc());
    return Array::get_context_flag_from_header(mem.get_addr());
}

void BPlusTreeNode::set_context_flag(bool cf) noexcept
{
    auto ref = get_ref();
    MemRef mem(ref, m_tree->get_alloc());
    if (Array::get_context_flag_from_header(mem.get_addr()) != cf) {
        Array arr(m_tree->get_alloc());
        arr.init_from_mem(mem);
        arr.set_context_flag(cf);
        if (auto new_ref = arr.get_ref(); new_ref != ref) {
            init_from_ref(new_ref);
            update_parent();
        }
    }
}

/*****************************************************************************/
/* BPlusTreeInner                                                            */
/* All interior nodes are of this class                                      */
/*****************************************************************************/
class BPlusTreeInner : public BPlusTreeNode, private Array {
public:
    using Array::destroy_deep;

    BPlusTreeInner(BPlusTreeBase* tree);
    ~BPlusTreeInner() override;

    void init_from_mem(MemRef mem);
    void create(size_t elems_per_child);

    /******** Implementation of abstract interface *******/
    bool is_leaf() const override
    {
        return false;
    }
    bool is_compact() const override
    {
        return (Array::get(0) & 1) != 0;
    }
    ref_type get_ref() const override
    {
        return Array::get_ref();
    }

    void init_from_ref(ref_type ref) noexcept override
    {
        char* header = m_alloc.translate(ref);
        init_from_mem(MemRef(header, ref, m_alloc));
    }

    void bp_set_parent(ArrayParent* p, size_t n) override
    {
        Array::set_parent(p, n);
    }
    void update_parent() override
    {
        Array::update_parent();
    }

    size_t get_node_size() const override
    {
        return Array::size() - 2;
    }
    size_t get_tree_size() const override
    {
        return size_t(back()) >> 1;
    }

    ref_type bptree_insert(size_t n, State& state, InsertFunc) override;
    void bptree_access(size_t n, AccessFunc) override;
    size_t bptree_erase(size_t n, EraseFunc) override;
    bool bptree_traverse(TraverseFunc) override;
    void verify() const override;

    // Other modifiers

    void append_tree_size(size_t sz)
    {
        Array::add((sz << 1) + 1);
    }
    void add_bp_node_ref(ref_type ref, int64_t offset = 0)
    {
        Array::add(from_ref(ref));
        REALM_ASSERT_DEBUG(offset >= 0);
        if (offset && m_offsets.is_attached()) {
            m_offsets.add(offset);
        }
    }
    // Reset first (and only!) child ref and return the previous value
    ref_type clear_first_bp_node_ref()
    {
        REALM_ASSERT(get_node_size() == 1);
        ref_type old_ref = Array::get_as_ref(1);
        Array::set(1, 0);

        return old_ref;
    }
    void ensure_offsets();

private:
    ArrayUnsigned m_offsets;
    size_t m_my_offset;

    void move(BPlusTreeNode* new_node, size_t ndx, int64_t offset_adj) override;
    void set_offset(size_t offs)
    {
        m_my_offset = offs;
    }
    void set_tree_size(size_t sz)
    {
        Array::set(m_size - 1, (sz << 1) + 1);
    }
    size_t get_elems_per_child() const
    {
        // Only relevant when in compact form
        REALM_ASSERT_DEBUG(!m_offsets.is_attached());
        return size_t(Array::get(0)) >> 1;
    }
    // Should not be mixed up with Array::get_child_ref
    ref_type get_bp_node_ref(size_t ndx) const noexcept
    {
        return Array::get_as_ref(ndx + 1);
    }
    void insert_bp_node_ref(size_t ndx, ref_type ref)
    {
        Array::insert(ndx + 1, from_ref(ref));
    }
    BPlusTreeLeaf* cache_leaf(MemRef mem, size_t ndx, size_t offset);
    void erase_and_destroy_bp_node(size_t ndx);
    ref_type insert_bp_node(size_t child_ndx, ref_type new_sibling_ref, State& state);
    size_t get_bp_node_offset(size_t child_ndx) const
    {
        return (child_ndx) > 0 ? size_t(m_offsets.get(child_ndx - 1)) : 0;
    }
};
}
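
// Layout of an inner node, as implied by the accessors above (a reader's
// summary, not a normative spec): the node is a single Array whose slot 0 is
// either a tagged "elements per child" value (compact form, LSB set) or a ref
// to an ArrayUnsigned of accumulated child offsets (general form), slots
// 1..N hold the child refs, and the last slot holds the total element count
// of the subtree, tagged. For example, a general-form node with three
// children might be stored as
//
//     [ ref(offsets), ref(child0), ref(child1), ref(child2), tagged(total) ]
//
// with offsets = [ n0, n0 + n1 ], i.e. one accumulated count per child except
// the last (see the assertion in erase_and_destroy_bp_node()).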

/****************************** BPlusTreeNode ********************************/

BPlusTreeNode::~BPlusTreeNode()
{
}

/****************************** BPlusTreeLeaf ********************************/

ref_type BPlusTreeLeaf::bptree_insert(size_t ndx, State& state, InsertFunc func)
{
    size_t leaf_size = get_node_size();
    REALM_ASSERT_DEBUG(leaf_size <= REALM_MAX_BPNODE_SIZE);
    if (ndx > leaf_size)
        ndx = leaf_size;
    if (REALM_LIKELY(leaf_size < REALM_MAX_BPNODE_SIZE)) {
        func(this, ndx);
        m_tree->adjust_leaf_bounds(1);
        return 0; // Leaf was not split
    }

    // Split leaf node
    auto new_leaf = m_tree->create_leaf_node();
    if (ndx == leaf_size) {
        func(new_leaf.get(), 0);
        state.split_offset = ndx;
    }
    else {
        move(new_leaf.get(), ndx, 0);
        func(this, ndx);
        state.split_offset = ndx + 1;
        // Invalidate cached leaf
        m_tree->invalidate_leaf_cache();
    }
    state.split_size = leaf_size + 1;

    return new_leaf->get_ref();
}
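
// Return contract used throughout this file (as exercised above): a
// bptree_insert() returns 0 when the target node had room, and the ref of a
// newly created right-hand sibling when the node had to be split. In the
// split case, state.split_offset is the number of elements kept in the
// original node and state.split_size is the combined element count of both
// nodes after the insert; the parent uses these to patch its offsets.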

void BPlusTreeLeaf::bptree_access(size_t ndx, AccessFunc func)
{
    func(this, ndx);
}

size_t BPlusTreeLeaf::bptree_erase(size_t ndx, EraseFunc func)
{
    return func(this, ndx);
}

bool BPlusTreeLeaf::bptree_traverse(TraverseFunc func)
{
    return func(this, 0) == IteratorControl::Stop;
}

/****************************** BPlusTreeInner *******************************/

BPlusTreeInner::BPlusTreeInner(BPlusTreeBase* tree)
    : BPlusTreeNode(tree)
    , Array(tree->get_alloc())
    , m_offsets(tree->get_alloc())
    , m_my_offset(0)
{
    m_offsets.set_parent(this, 0);
}

void BPlusTreeInner::create(size_t elems_per_child)
{
    // Born only with room for number of elements per child
    uint64_t tagged = (elems_per_child << 1) + 1;
    Array::create(Array::type_InnerBptreeNode, false, 1, tagged);
}

BPlusTreeInner::~BPlusTreeInner()
{
}

void BPlusTreeInner::init_from_mem(MemRef mem)
{
    Array::init_from_mem(mem);
    auto rot = Array::get(0);
    if ((rot & 1) == 0) {
        // rot is a ref
        m_offsets.init_from_ref(to_ref(rot));
    }
    else {
        m_offsets.detach();
    }
}

void BPlusTreeInner::bptree_access(size_t n, AccessFunc func)
{
    size_t child_ndx;
    size_t child_offset;
    if (m_offsets.is_attached()) {
        child_ndx = m_offsets.upper_bound(n);
        child_offset = get_bp_node_offset(child_ndx);
        REALM_ASSERT_3(child_ndx, <, get_node_size());
    }
    else {
        auto elems_per_child = get_elems_per_child();
        child_ndx = n / elems_per_child;
        child_offset = child_ndx * elems_per_child;
    }

    ref_type child_ref = get_bp_node_ref(child_ndx);
    char* child_header = m_alloc.translate(child_ref);
    MemRef mem(child_header, child_ref, m_alloc);
    bool child_is_leaf = !Array::get_is_inner_bptree_node_from_header(child_header);
    if (child_is_leaf) {
        auto leaf = cache_leaf(mem, child_ndx, child_offset + m_my_offset);
        func(leaf, n - child_offset);
    }
    else {
        BPlusTreeInner node(m_tree);
        node.set_parent(this, child_ndx + 1);
        node.init_from_mem(mem);
        node.set_offset(child_offset + m_my_offset);
        node.bptree_access(n - child_offset, func);
    }
}

ref_type BPlusTreeInner::bptree_insert(size_t ndx, State& state, InsertFunc func)
{
    size_t child_ndx;
    size_t child_offset;
    if (ndx != npos) {
        ensure_offsets();
        child_ndx = m_offsets.upper_bound(ndx);
        child_offset = get_bp_node_offset(child_ndx);
        ndx -= child_offset;
        REALM_ASSERT_3(child_ndx, <, get_node_size());
    }
    else {
        child_ndx = get_node_size() - 1;
        if (m_offsets.is_attached()) {
            child_offset = get_bp_node_offset(child_ndx);
            REALM_ASSERT_3(child_ndx, <, get_node_size());
        }
        else {
            auto elems_per_child = get_elems_per_child();
            child_offset = child_ndx * elems_per_child;
        }
    }

    ref_type child_ref = get_bp_node_ref(child_ndx);
    char* child_header = m_alloc.translate(child_ref);
    MemRef mem(child_header, child_ref, m_alloc);
    bool child_is_leaf = !Array::get_is_inner_bptree_node_from_header(child_header);
    ref_type new_sibling_ref;
    if (child_is_leaf) {
        auto leaf = cache_leaf(mem, child_ndx, child_offset + m_my_offset);
        new_sibling_ref = leaf->bptree_insert(ndx, state, func);
    }
    else {
        BPlusTreeInner node(m_tree);
        node.set_parent(this, child_ndx + 1);
        node.init_from_mem(mem);
        node.set_offset(child_offset + m_my_offset);
        new_sibling_ref = node.bptree_insert(ndx, state, func);
    }

    if (!new_sibling_ref) {
        adjust(size() - 1, +2); // Throws
        if (m_offsets.is_attached()) {
            m_offsets.adjust(child_ndx, m_offsets.size(), 1);
        }
        return 0;
    }

    return insert_bp_node(child_ndx, new_sibling_ref, state);
}
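
// Note on the bookkeeping above: the subtree size is stored tagged in the
// last slot, so adjust(size() - 1, +2) adds exactly one element to the
// recorded count, and m_offsets.adjust(child_ndx, ..., 1) shifts the
// accumulated counts of the touched child and everything after it up by one.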

size_t BPlusTreeInner::bptree_erase(size_t n, EraseFunc func)
{
    ensure_offsets();

    size_t child_ndx = m_offsets.upper_bound(n);
    size_t child_offset = get_bp_node_offset(child_ndx);
    REALM_ASSERT_3(child_ndx, <, get_node_size());

    // Call bptree_erase recursively and ultimately call 'func' on leaf
    size_t erase_node_size;
    ref_type child_ref = get_bp_node_ref(child_ndx);
    char* child_header = m_alloc.translate(child_ref);
    MemRef mem(child_header, child_ref, m_alloc);
    BPlusTreeLeaf* leaf = nullptr;
    BPlusTreeInner node(m_tree);
    bool child_is_leaf = !Array::get_is_inner_bptree_node_from_header(child_header);
    if (child_is_leaf) {
        leaf = cache_leaf(mem, child_ndx, child_offset + m_my_offset);
        erase_node_size = func(leaf, n - child_offset);
        if (erase_node_size == 0) {
            m_tree->invalidate_leaf_cache();
        }
        else {
            m_tree->adjust_leaf_bounds(-1);
        }
    }
    else {
        node.set_parent(this, child_ndx + 1);
        node.init_from_mem(mem);
        node.set_offset(child_offset + m_my_offset);
        erase_node_size = node.bptree_erase(n - child_offset, func);
    }

    adjust(size() - 1, -2);
    m_offsets.adjust(child_ndx, m_offsets.size(), -1);

    // Check if some nodes could be merged
    size_t num_children = get_node_size();
    if (erase_node_size == 0) {
        if (num_children == 1) {
            // Only child empty - delete this one too
            return 0;
        }
        // Child leaf is empty - destroy it!
        erase_and_destroy_bp_node(child_ndx);
        return num_children - 1;
    }
    else if (erase_node_size < REALM_MAX_BPNODE_SIZE / 2 && child_ndx < (num_children - 1)) {
        // Candidate for merge. First calculate if the combined size of current and
        // next sibling is small enough.
        size_t sibling_ndx = child_ndx + 1;
        ref_type sibling_ref = get_bp_node_ref(sibling_ndx);
        std::unique_ptr<BPlusTreeLeaf> sibling_leaf;
        BPlusTreeInner node2(m_tree);
        BPlusTreeNode* sibling_node;
        if (child_is_leaf) {
            sibling_leaf = m_tree->init_leaf_node(sibling_ref);
            sibling_node = sibling_leaf.get();
        }
        else {
            node2.init_from_ref(sibling_ref);
            sibling_node = &node2;
        }
        sibling_node->bp_set_parent(this, sibling_ndx + 1);

        size_t combined_size = sibling_node->get_node_size() + erase_node_size;

        if (combined_size < REALM_MAX_BPNODE_SIZE * 3 / 4) {
            // Combined size is small enough for the nodes to be merged
            // Move all content from the next sibling into current node
            int64_t offs_adj = 0;
            if (child_is_leaf) {
                REALM_ASSERT(leaf);
                if (sibling_ndx < m_offsets.size())
                    m_offsets.set(sibling_ndx - 1, m_offsets.get(sibling_ndx));
                sibling_node->move(leaf, 0, 0);
                // Invalidate cached leaf
                m_tree->invalidate_leaf_cache();
            }
            else {
                node.ensure_offsets();
                node2.ensure_offsets();
                // Offset adjustment is negative as the sibling has bigger offsets
                auto sibling_offs = m_offsets.get(sibling_ndx - 1);
                auto erase_node_offs = get_bp_node_offset(child_ndx);
                offs_adj = erase_node_offs - sibling_offs;
                if (sibling_ndx < m_offsets.size())
                    m_offsets.set(sibling_ndx - 1, m_offsets.get(sibling_ndx));

                size_t orig_size = node.get_tree_size();
                size_t size_of_moved = node2.get_tree_size();
                // Remove size field from node as the move operation will just add
                node.erase(node.size() - 1);
                node2.move(&node, 0, offs_adj);
                node.append_tree_size(orig_size + size_of_moved);
            }

            // Destroy sibling
            erase_and_destroy_bp_node(sibling_ndx);
            num_children--;
        }
    }

    return num_children;
}
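
// Merge policy used above: after an erase, a child that has dropped below
// REALM_MAX_BPNODE_SIZE / 2 elements is merged with its right-hand sibling,
// but only when the combined node would stay below 3/4 of
// REALM_MAX_BPNODE_SIZE, which keeps the merged node well clear of an
// immediate re-split.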

bool BPlusTreeInner::bptree_traverse(TraverseFunc func)
{
    size_t sz = get_node_size();
    for (size_t i = 0; i < sz; i++) {
        size_t child_offset;
        if (m_offsets.is_attached()) {
            child_offset = get_bp_node_offset(i);
        }
        else {
            auto elems_per_child = get_elems_per_child();
            child_offset = i * elems_per_child;
        }

        bool done;
        ref_type child_ref = get_bp_node_ref(i);
        char* child_header = m_alloc.translate(child_ref);
        MemRef mem(child_header, child_ref, m_alloc);
        bool child_is_leaf = !Array::get_is_inner_bptree_node_from_header(child_header);
        if (child_is_leaf) {
            auto leaf = cache_leaf(mem, i, child_offset + m_my_offset);
            done = (func(leaf, child_offset + m_my_offset) == IteratorControl::Stop);
        }
        else {
            BPlusTreeInner node(m_tree);
            node.set_parent(this, i + 1);
            node.init_from_mem(mem);
            node.set_offset(child_offset + m_my_offset);
            // std::cout << "BPlusTreeInner offset: " << child_offset << std::endl;
            done = node.bptree_traverse(func);
        }
        if (done) {
            return true;
        }
    }
    return false;
}

void BPlusTreeInner::move(BPlusTreeNode* new_node, size_t ndx, int64_t adj)
{
    BPlusTreeInner* dst(static_cast<BPlusTreeInner*>(new_node));
    size_t sz = get_node_size();

    // Copy refs
    for (size_t i = ndx; i < sz; i++) {
        size_t offs = get_bp_node_offset(i);
        dst->add_bp_node_ref(get_bp_node_ref(i), offs - adj);
    }
    truncate(ndx + 1);
    if (ndx > 0)
        m_offsets.truncate(ndx - 1);
}

void BPlusTreeInner::ensure_offsets()
{
    if (!m_offsets.is_attached()) {
        size_t elems_per_child = get_elems_per_child();
        size_t sz = size();
        size_t num_offsets = (sz > 2) ? sz - 3 : 0;
        size_t offs = 0;

        m_offsets.create(num_offsets, num_offsets * elems_per_child);
        for (size_t i = 0; i != num_offsets; ++i) {
            offs += elems_per_child;
            m_offsets.set(i, offs);
        }
        Array::set_as_ref(0, m_offsets.get_ref());
    }
}
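
// ensure_offsets() is the conversion from compact form to general form: it
// materializes the offsets array (one accumulated count per child except the
// last, each a multiple of the old elems_per_child) and stores its ref in
// slot 0, replacing the tagged elems_per_child value.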

inline BPlusTreeLeaf* BPlusTreeInner::cache_leaf(MemRef mem, size_t ndx, size_t offset)
{
    BPlusTreeLeaf* leaf = m_tree->cache_leaf(mem);
    leaf->bp_set_parent(this, ndx + 1);
    size_t sz = leaf->get_node_size();
    m_tree->set_leaf_bounds(offset, offset + sz);

    return leaf;
}

void BPlusTreeInner::erase_and_destroy_bp_node(size_t ndx)
{
    ref_type ref = get_bp_node_ref(ndx);
    erase(ndx + 1);
    Array::destroy_deep(ref, m_tree->get_alloc());
    REALM_ASSERT_DEBUG(m_offsets.is_attached());
    size_t sz = m_offsets.size();
    if (sz) {
        // in this case there will always be an offset to erase
        if (ndx < sz) {
            m_offsets.erase(ndx);
        }
        else {
            m_offsets.erase(sz - 1);
        }
    }
    REALM_ASSERT_DEBUG(m_offsets.size() == get_node_size() - 1);
}

ref_type BPlusTreeInner::insert_bp_node(size_t child_ndx, ref_type new_sibling_ref, State& state)
{
    size_t new_ref_ndx = child_ndx + 1;

    size_t sz = get_node_size();
    if (sz < REALM_MAX_BPNODE_SIZE) {
        // Room in current node for the new child
        adjust(size() - 1, +2); // Throws
        if (m_offsets.is_attached()) {
            size_t elem_ndx_offset = get_bp_node_offset(child_ndx);
            m_offsets.insert(child_ndx, elem_ndx_offset + state.split_offset); // Throws
            m_offsets.adjust(child_ndx + 1, m_offsets.size(), +1);             // Throws
        }
        insert_bp_node_ref(new_ref_ndx, new_sibling_ref);
        return ref_type(0);
    }

    // This node has to be split
    BPlusTreeInner new_sibling(m_tree);

    size_t elem_ndx_offset = 0;
    if (m_offsets.is_attached()) {
        new_sibling.create(0);
        new_sibling.ensure_offsets();
        elem_ndx_offset = get_bp_node_offset(child_ndx);
    }
    else {
        size_t elems_per_child = get_elems_per_child();
        elem_ndx_offset = child_ndx * elems_per_child;
        new_sibling.create(elems_per_child);
    }

    size_t new_split_offset;
    size_t new_split_size;
    if (new_ref_ndx == sz) {
        // Case 1/2: The split child was the last child of the parent
        // to be split. In this case the parent may or may not be on
        // the compact form.
        new_split_offset = size_t(elem_ndx_offset + state.split_offset);
        new_split_size = elem_ndx_offset + state.split_size;
        new_sibling.add_bp_node_ref(new_sibling_ref); // Throws
        set_tree_size(new_split_offset);              // Throws
    }
    else {
        // Case 2/2: The split child was not the last child of the
        // parent to be split. Since this is not possible during
        // 'append', we can safely assume that the parent node is on
        // the general form.
        new_split_offset = size_t(elem_ndx_offset + state.split_size);
        new_split_size = get_tree_size() + 1;

        move(&new_sibling, new_ref_ndx, (new_split_offset - 1));                // Strips off tree size
        add_bp_node_ref(new_sibling_ref, elem_ndx_offset + state.split_offset); // Throws
        append_tree_size(new_split_offset);                                     // Throws
    }

    new_sibling.append_tree_size(new_split_size - new_split_offset); // Throws

    state.split_offset = new_split_offset;
    state.split_size = new_split_size;

    return new_sibling.get_ref();
}

void BPlusTreeInner::verify() const
{
#ifdef REALM_DEBUG
    Array::verify();

    // This node must not be a leaf
    REALM_ASSERT_3(Array::get_type(), ==, Array::type_InnerBptreeNode);

    REALM_ASSERT_3(Array::size(), >=, 2);
    size_t num_children = get_node_size();

    // Verify invar:bptree-nonempty-inner
    REALM_ASSERT_3(num_children, >=, 1);

    size_t elems_per_child = 0;
    if (m_offsets.is_attached()) {
        REALM_ASSERT_DEBUG(m_offsets.size() == num_children - 1);
    }
    else {
        elems_per_child = get_elems_per_child();
    }

    size_t num_elems = 0;
    for (size_t i = 0; i < num_children; i++) {
        ref_type child_ref = get_bp_node_ref(i);
        char* child_header = m_alloc.translate(child_ref);
        MemRef mem(child_header, child_ref, m_alloc);
        bool child_is_leaf = !Array::get_is_inner_bptree_node_from_header(child_header);
        size_t elems_in_child;
        if (child_is_leaf) {
            auto leaf = const_cast<BPlusTreeInner*>(this)->cache_leaf(mem, i, 0);
            elems_in_child = leaf->get_node_size();
            leaf->verify();
        }
        else {
            BPlusTreeInner node(m_tree);
            node.set_parent(const_cast<BPlusTreeInner*>(this), i + 1);
            node.init_from_mem(mem);
            node.verify();
            elems_in_child = node.get_tree_size();
        }
        num_elems += elems_in_child;
        if (m_offsets.is_attached()) {
            if (i < num_children - 1) {
                REALM_ASSERT_DEBUG(num_elems == m_offsets.get(i));
            }
        }
        else {
            if (i < num_children - 1) {
                REALM_ASSERT_DEBUG(elems_in_child == elems_per_child);
            }
            else {
                REALM_ASSERT_DEBUG(elems_in_child <= elems_per_child);
            }
        }
    }
    REALM_ASSERT_DEBUG(get_tree_size() == num_elems);
    m_tree->invalidate_leaf_cache();
#endif
}

/****************************** BPlusTreeBase ********************************/

BPlusTreeBase::~BPlusTreeBase()
{
}

void BPlusTreeBase::create()
{
    if (!m_root) {                   // Idempotent
        m_root = create_leaf_node(); // Throws
        if (m_parent) {
            ref_type ref = get_ref();
            _impl::DeepArrayRefDestroyGuard destroy_guard{ref, get_alloc()};
            m_parent->update_child_ref(m_ndx_in_parent, ref); // Throws
            destroy_guard.release();
        }
        m_root->bp_set_parent(m_parent, m_ndx_in_parent);
    }
}

void BPlusTreeBase::destroy()
{
    if (is_attached()) {
        ref_type ref = m_root->get_ref();
        Array::destroy_deep(ref, m_alloc);
        m_root = nullptr;
    }
    invalidate_leaf_cache();
}

void BPlusTreeBase::replace_root(std::unique_ptr<BPlusTreeNode> new_root)
{
    // Maintain parent.
    new_root->bp_set_parent(m_parent, m_ndx_in_parent);
    new_root->update_parent(); // Throws

    m_root = std::move(new_root);
}

void BPlusTreeBase::bptree_insert(size_t n, BPlusTreeNode::InsertFunc func)
{
    size_t bptree_size = m_root->get_tree_size();
    if (n == bptree_size) {
        n = npos;
    }
    BPlusTreeNode::State state;
    ref_type new_sibling_ref = m_root->bptree_insert(n, state, func);
    if (REALM_UNLIKELY(new_sibling_ref)) {
        bool compact_form = (n == npos) && m_root->is_compact();
        auto new_root = std::make_unique<BPlusTreeInner>(this);
        if (!compact_form) {
            new_root->create(0);
            new_root->ensure_offsets();
        }
        else {
            new_root->create(size_t(state.split_offset));
        }

        new_root->add_bp_node_ref(m_root->get_ref());                   // Throws
        new_root->add_bp_node_ref(new_sibling_ref, state.split_offset); // Throws
        new_root->append_tree_size(state.split_size);
        replace_root(std::move(new_root));
    }
}
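
// When the root itself reports a split, a new inner root is created with the
// old root and the new sibling as its only two children, which is how the
// tree grows in height. The InsertFunc callback is what actually writes the
// value into the target leaf; this file only routes it to the right node and
// index. A caller is expected to supply something shaped roughly like the
// sketch below (the leaf type and value are illustrative placeholders, not
// names taken from this file):
//
//     T value = ...;
//     tree.bptree_insert(ndx, [&](BPlusTreeNode* node, size_t ndx_in_leaf) {
//         static_cast<LeafArrayType*>(node)->insert(ndx_in_leaf, value);
//     });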

void BPlusTreeBase::bptree_erase(size_t n, BPlusTreeNode::EraseFunc func)
{
    size_t root_size = m_root->bptree_erase(n, func);
    while (!m_root->is_leaf() && root_size == 1) {
        BPlusTreeInner* node = static_cast<BPlusTreeInner*>(m_root.get());

        ref_type new_root_ref = node->clear_first_bp_node_ref();
        node->destroy_deep();

        auto new_root = create_root_from_ref(new_root_ref);

        replace_root(std::move(new_root));
        root_size = m_root->get_node_size();
    }
}
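
// The counterpart to the root split above: whenever the root is an inner node
// left with a single child, that child is promoted to be the new root, so the
// tree shrinks in height as elements are erased.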

std::unique_ptr<BPlusTreeNode> BPlusTreeBase::create_root_from_ref(ref_type ref)
{
    char* header = m_alloc.translate(ref);
    bool is_leaf = !Array::get_is_inner_bptree_node_from_header(header);
    bool reuse_root = m_root && m_root->is_leaf() == is_leaf;

    if (reuse_root) {
        m_root->init_from_ref(ref);
        return std::move(m_root);
    }

    if (is_leaf) {
        return init_leaf_node(ref);
    }
    else {
        std::unique_ptr<BPlusTreeNode> new_root = std::make_unique<BPlusTreeInner>(this);
        new_root->init_from_ref(ref);
        return new_root;
    }
}