arangodb / velocypack, build 3998645281 (pending completion)
GitHub Pull Request #148: use separate namespace for xxh functions
Merge b1e3c924b into 5a28b6413

0 of 5107 relevant lines covered (0.0%), 0.0 hits per line

Source File: /src/Builder.cpp (0.0% covered)
////////////////////////////////////////////////////////////////////////////////
/// DISCLAIMER
///
/// Copyright 2014-2020 ArangoDB GmbH, Cologne, Germany
/// Copyright 2004-2014 triAGENS GmbH, Cologne, Germany
///
/// Licensed under the Apache License, Version 2.0 (the "License");
/// you may not use this file except in compliance with the License.
/// You may obtain a copy of the License at
///
///     http://www.apache.org/licenses/LICENSE-2.0
///
/// Unless required by applicable law or agreed to in writing, software
/// distributed under the License is distributed on an "AS IS" BASIS,
/// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
/// See the License for the specific language governing permissions and
/// limitations under the License.
///
/// Copyright holder is ArangoDB GmbH, Cologne, Germany
///
/// @author Max Neunhoeffer
/// @author Jan Steemann
////////////////////////////////////////////////////////////////////////////////

#include <array>
#include <memory>
#include <string_view>
#include <unordered_set>

#include "velocypack/velocypack-common.h"
#include "velocypack/Builder.h"
#include "velocypack/Dumper.h"
#include "velocypack/Iterator.h"
#include "velocypack/Sink.h"

using namespace arangodb::velocypack;

namespace {

// checks whether a memmove operation is allowed to get rid of the padding
bool isAllowedToMemmove(Options const* options, uint8_t const* start,
                        std::vector<ValueLength>::iterator indexStart,
                        std::vector<ValueLength>::iterator indexEnd,
                        ValueLength offsetSize) {
  VELOCYPACK_ASSERT(offsetSize == 1 || offsetSize == 2);

  if (options->paddingBehavior == Options::PaddingBehavior::NoPadding ||
      (offsetSize == 1 &&
       options->paddingBehavior == Options::PaddingBehavior::Flexible)) {
    std::size_t const distance = std::distance(indexStart, indexEnd);
    std::size_t const n = (std::min)(std::size_t(8 - 2 * offsetSize), distance);
    for (std::size_t i = 0; i < n; i++) {
      if (start[indexStart[i]] == 0x00) {
        return false;
      }
    }
    return true;
  }

  return false;
}

uint8_t determineArrayType(bool needIndexTable, ValueLength offsetSize) {
  uint8_t type;
  // Now build the table:
  if (needIndexTable) {
    type = 0x06;
  } else {  // no index table
    type = 0x02;
  }
  // Finally fix the byte width in the type byte:
  if (offsetSize == 2) {
    type += 1;
  } else if (offsetSize == 4) {
    type += 2;
  } else if (offsetSize == 8) {
    type += 3;
  }
  return type;
}
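// Added note (illustrative, not part of the original source): the mapping
// above yields the VelocyPack array type bytes, for example:
//   needIndexTable = false, offsetSize = 1  ->  0x02 (array without index table)
//   needIndexTable = false, offsetSize = 4  ->  0x04
//   needIndexTable = true,  offsetSize = 1  ->  0x06 (array with index table)
//   needIndexTable = true,  offsetSize = 8  ->  0x09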

constexpr ValueLength linearAttributeUniquenessCutoff = 4;

// struct used when sorting index tables for objects:
struct SortEntry {
  uint8_t const* nameStart;
  uint64_t nameSize;
  uint64_t offset;
};

// minimum allocation done for the sortEntries vector
// this is used to overallocate memory so we can avoid some follow-up
// reallocations
constexpr size_t minSortEntriesAllocation = 32;

#ifndef VELOCYPACK_NO_THREADLOCALS

// thread-local, reusable buffer used for sorting medium to big index entries
thread_local std::unique_ptr<std::vector<SortEntry>> sortEntries;

// thread-local, reusable set to track usage of duplicate keys
thread_local std::unique_ptr<std::unordered_set<std::string_view>>
    duplicateKeys;

#endif

// Find the actual bytes of the attribute name of the VPack value
// at position base, also determine the length len of the attribute.
// This takes into account the different possibilities for the format
// of attribute names:
uint8_t const* findAttrName(uint8_t const* base, uint64_t& len) {
  uint8_t const b = *base;
  if (b >= 0x40 && b <= 0xbe) {
    // short UTF-8 string
    len = b - 0x40;
    return base + 1;
  }
  if (b == 0xbf) {
    // long UTF-8 string
    len = 0;
    // read string length
    for (std::size_t i = 8; i >= 1; i--) {
      len = (len << 8) + base[i];
    }
    return base + 1 + 8;  // string starts here
  }

  // translate attribute name
  return findAttrName(arangodb::velocypack::Slice(base).makeKey().start(), len);
}
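// Added note (illustrative, not part of the original source): for a short
// key such as "foo" the head byte is 0x43 (0x40 plus the length 3), so
// findAttrName() returns base + 1 with len = 3. For long strings the head
// byte is 0xbf, followed by an 8-byte little-endian length, hence the
// "base + 1 + 8" return value. Any other head byte is treated as a
// translated key and resolved via Slice::makeKey() first.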

bool checkAttributeUniquenessUnsortedBrute(ObjectIterator& it) {
  std::array<std::string_view, linearAttributeUniquenessCutoff> keys;

  do {
    // key(true) guarantees a String as returned type
    std::string_view key = it.key(true).stringView();

    ValueLength index = it.index();
    // compare with all other already looked-at keys
    for (ValueLength i = 0; i < index; ++i) {
      if (VELOCYPACK_UNLIKELY(keys[i] == key)) {
        return false;
      }
    }
    keys[index] = key;
    it.next();

  } while (it.valid());

  return true;
}

bool checkAttributeUniquenessUnsortedSet(ObjectIterator& it) {
#ifndef VELOCYPACK_NO_THREADLOCALS
  std::unique_ptr<std::unordered_set<std::string_view>>& tmp = ::duplicateKeys;

  if (::duplicateKeys == nullptr) {
    ::duplicateKeys = std::make_unique<std::unordered_set<std::string_view>>();
  } else {
    ::duplicateKeys->clear();
  }
#else
  auto tmp = std::make_unique<std::unordered_set<std::string_view>>();
#endif

  do {
    Slice key = it.key(true);
    // key(true) guarantees a String as returned type
    VELOCYPACK_ASSERT(key.isString());
    if (VELOCYPACK_UNLIKELY(!tmp->emplace(key.stringView()).second)) {
      // identical key
      return false;
    }
    it.next();
  } while (it.valid());

  return true;
}

}  // namespace

// create an empty Builder, using default Options
Builder::Builder()
    : _buffer(std::make_shared<Buffer<uint8_t>>()),
      _bufferPtr(_buffer.get()),
      _start(_bufferPtr->data()),
      _pos(0),
      _arena(),
      _stack(_arena),
      _keyWritten(false),
      options(&Options::Defaults) {
  // do a full initial allocation in the arena, so we can maximize its usage
  _stack.reserve(arenaSize / sizeof(decltype(_stack)::value_type));
}

// create an empty Builder, using Options
Builder::Builder(Options const* opts) : Builder() {
  if (VELOCYPACK_UNLIKELY(opts == nullptr)) {
    throw Exception(Exception::InternalError, "Options cannot be a nullptr");
  }
  options = opts;
}

// create an empty Builder, using an existing buffer and default Options
Builder::Builder(std::shared_ptr<Buffer<uint8_t>> buffer)
    : _buffer(std::move(buffer)),
      _bufferPtr(_buffer.get()),
      _start(nullptr),
      _pos(0),
      _arena(),
      _stack(_arena),
      _keyWritten(false),
      options(&Options::Defaults) {
  if (VELOCYPACK_UNLIKELY(_bufferPtr == nullptr)) {
    throw Exception(Exception::InternalError, "Buffer cannot be a nullptr");
  }
  _start = _bufferPtr->data();
  _pos = _bufferPtr->size();

  // do a full initial allocation in the arena, so we can maximize its usage
  _stack.reserve(arenaSize / sizeof(decltype(_stack)::value_type));
}

// create an empty Builder, using an existing buffer
Builder::Builder(std::shared_ptr<Buffer<uint8_t>> buffer, Options const* opts)
    : Builder(std::move(buffer)) {
  if (VELOCYPACK_UNLIKELY(opts == nullptr)) {
    throw Exception(Exception::InternalError, "Options cannot be a nullptr");
  }
  options = opts;
}

// create a Builder that uses an existing Buffer and options.
// the Builder will not claim ownership for its Buffer
Builder::Builder(Buffer<uint8_t>& buffer) noexcept
    : _bufferPtr(&buffer),
      _start(_bufferPtr->data()),
      _pos(buffer.size()),
      _arena(),
      _stack(_arena),
      _keyWritten(false),
      options(&Options::Defaults) {
  // do a full initial allocation in the arena, so we can maximize its usage
  _stack.reserve(arenaSize / sizeof(decltype(_stack)::value_type));
}

// create a Builder that uses an existing Buffer. the Builder will not
// claim ownership for its Buffer
Builder::Builder(Buffer<uint8_t>& buffer, Options const* opts)
    : Builder(buffer) {
  if (VELOCYPACK_UNLIKELY(opts == nullptr)) {
    throw Exception(Exception::InternalError, "Options cannot be a nullptr");
  }
  options = opts;
}

// populate a Builder from a Slice
Builder::Builder(Slice slice, Options const* options) : Builder(options) {
  add(slice);
}

Builder::Builder(Builder const& that)
    : _bufferPtr(nullptr),
      _start(nullptr),
      _pos(that._pos),
      _arena(),
      _stack(_arena),
      _indexes(that._indexes),
      _keyWritten(that._keyWritten),
      options(that.options) {
  VELOCYPACK_ASSERT(options != nullptr);

  _stack = that._stack;

  if (that._buffer == nullptr) {
    _bufferPtr = that._bufferPtr;
  } else {
    _buffer = std::make_shared<Buffer<uint8_t>>(*that._buffer);
    _bufferPtr = _buffer.get();
  }

  if (_bufferPtr != nullptr) {
    _start = _bufferPtr->data();
  }

  // do a full initial allocation in the arena, so we can maximize its usage
  _stack.reserve(arenaSize / sizeof(decltype(_stack)::value_type));
}

Builder& Builder::operator=(Builder const& that) {
  if (this != &that) {
    if (that._buffer == nullptr) {
      _buffer.reset();
      _bufferPtr = that._bufferPtr;
    } else {
      _buffer = std::make_shared<Buffer<uint8_t>>(*that._buffer);
      _bufferPtr = _buffer.get();
    }
    if (_bufferPtr == nullptr) {
      _start = nullptr;
    } else {
      _start = _bufferPtr->data();
    }
    _pos = that._pos;
    _stack = that._stack;
    _indexes = that._indexes;
    _keyWritten = that._keyWritten;
    options = that.options;
  }
  VELOCYPACK_ASSERT(options != nullptr);
  return *this;
}

Builder::Builder(Builder&& that) noexcept
    : _buffer(std::move(that._buffer)),
      _bufferPtr(nullptr),
      _start(nullptr),
      _pos(that._pos),
      _arena(),
      _stack(_arena),
      _indexes(std::move(that._indexes)),
      _keyWritten(that._keyWritten),
      options(that.options) {
  // do a full initial allocation in the arena, so we can maximize its usage
  _stack.reserve(arenaSize / sizeof(decltype(_stack)::value_type));
  _stack = std::move(that._stack);

  if (_buffer != nullptr) {
    _bufferPtr = _buffer.get();
  } else {
    _bufferPtr = that._bufferPtr;
  }
  if (_bufferPtr != nullptr) {
    _start = _bufferPtr->data();
  }
  VELOCYPACK_ASSERT(that._buffer == nullptr);
  that._bufferPtr = nullptr;
  that.clear();
}

Builder& Builder::operator=(Builder&& that) noexcept {
  if (this != &that) {
    _buffer = std::move(that._buffer);
    if (_buffer != nullptr) {
      _bufferPtr = _buffer.get();
    } else {
      _bufferPtr = that._bufferPtr;
    }
    if (_bufferPtr != nullptr) {
      _start = _bufferPtr->data();
    } else {
      _start = nullptr;
    }
    _pos = that._pos;
    // do a full initial allocation in the arena, so we can maximize its usage
    _stack.reserve(arenaSize / sizeof(decltype(_stack)::value_type));
    _stack = std::move(that._stack);
    _indexes = std::move(that._indexes);
    _keyWritten = that._keyWritten;
    options = that.options;
    VELOCYPACK_ASSERT(that._buffer == nullptr);
    that._bufferPtr = nullptr;
    that.clear();
  }
  return *this;
}

std::string Builder::toString() const {
  Options opts;
  opts.prettyPrint = true;

  std::string buffer;
  StringSink sink(&buffer);
  Dumper::dump(slice(), &sink, &opts);
  return buffer;
}

std::string Builder::toJson() const {
  std::string buffer;
  StringSink sink(&buffer);
  Dumper::dump(slice(), &sink);
  return buffer;
}
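// ---------------------------------------------------------------------------
// Usage sketch (added for illustration; not part of the original file).
// It shows the typical lifecycle supported by the constructors and the
// toJson()/toString() helpers above, assuming the usual Builder API
// (openObject/openArray, add, close):
//
//   #include <iostream>
//   #include "velocypack/Builder.h"
//   #include "velocypack/Value.h"
//
//   void example() {
//     arangodb::velocypack::Builder b;   // owns its own Buffer, default Options
//     b.openObject();
//     b.add("name", arangodb::velocypack::Value("velocypack"));
//     b.add("answer", arangodb::velocypack::Value(42));
//     b.close();                         // finalize the object (see close() below)
//     std::cout << b.toJson() << std::endl;
//   }
// ---------------------------------------------------------------------------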

void Builder::sortObjectIndexShort(
    uint8_t* objBase, std::vector<ValueLength>::iterator indexStart,
    std::vector<ValueLength>::iterator indexEnd) const {
  std::sort(indexStart, indexEnd,
            [objBase](ValueLength const& a, ValueLength const& b) {
              uint8_t const* aa = objBase + a;
              uint8_t const* bb = objBase + b;
              if (*aa >= 0x40 && *aa <= 0xbe && *bb >= 0x40 && *bb <= 0xbe) {
                // The fast path, short strings:
                uint8_t m = (std::min)(*aa - 0x40, *bb - 0x40);
                int c = std::memcmp(aa + 1, bb + 1, checkOverflow(m));
                return (c < 0 || (c == 0 && *aa < *bb));
              } else {
                uint64_t lena;
                uint64_t lenb;
                aa = findAttrName(aa, lena);
                bb = findAttrName(bb, lenb);
                uint64_t m = (std::min)(lena, lenb);
                int c = std::memcmp(aa, bb, checkOverflow(m));
                return (c < 0 || (c == 0 && lena < lenb));
              }
            });
}

void Builder::sortObjectIndexLong(
    uint8_t* objBase, std::vector<ValueLength>::iterator indexStart,
    std::vector<ValueLength>::iterator indexEnd) const {
#ifndef VELOCYPACK_NO_THREADLOCALS
  std::unique_ptr<std::vector<SortEntry>>& tmp = ::sortEntries;

  // start with clean sheet in case the previous run left something
  // in the vector (e.g. when bailing out with an exception)
  if (::sortEntries == nullptr) {
    ::sortEntries = std::make_unique<std::vector<SortEntry>>();
  } else {
    ::sortEntries->clear();
  }
#else
  auto tmp = std::make_unique<std::vector<SortEntry>>();
#endif

  std::size_t const n = std::distance(indexStart, indexEnd);
  VELOCYPACK_ASSERT(n > 1);
  tmp->reserve(std::max(::minSortEntriesAllocation, n));
  for (std::size_t i = 0; i < n; i++) {
    SortEntry e;
    e.offset = indexStart[i];
    e.nameStart = ::findAttrName(objBase + e.offset, e.nameSize);
    tmp->push_back(e);
  }
  VELOCYPACK_ASSERT(tmp->size() == n);
  std::sort(tmp->begin(), tmp->end(),
            [](SortEntry const& a, SortEntry const& b)
#ifdef VELOCYPACK_64BIT
                noexcept
#endif
            {
              // return true iff a < b:
              uint64_t sizea = a.nameSize;
              uint64_t sizeb = b.nameSize;
              std::size_t const compareLength =
                  checkOverflow((std::min)(sizea, sizeb));
              int res = std::memcmp(a.nameStart, b.nameStart, compareLength);

              return (res < 0 || (res == 0 && sizea < sizeb));
            });

  // copy back the sorted offsets
  for (std::size_t i = 0; i < n; i++) {
    indexStart[i] = (*tmp)[i].offset;
  }
}

Builder& Builder::closeEmptyArrayOrObject(ValueLength pos, bool isArray) {
  // empty Array or Object
  _start[pos] = (isArray ? 0x01 : 0x0a);
  VELOCYPACK_ASSERT(_pos == pos + 9);
  rollback(8);  // no bytelength and number subvalues needed
  closeLevel();
  return *this;
}

bool Builder::closeCompactArrayOrObject(
    ValueLength pos, bool isArray,
    std::vector<ValueLength>::iterator indexStart,
    std::vector<ValueLength>::iterator indexEnd) {
  std::size_t const n = std::distance(indexStart, indexEnd);

  // use compact notation
  ValueLength nLen = getVariableValueLength(static_cast<ValueLength>(n));
  VELOCYPACK_ASSERT(nLen > 0);
  ValueLength byteSize = _pos - (pos + 8) + nLen;
  VELOCYPACK_ASSERT(byteSize > 0);
  ValueLength bLen = getVariableValueLength(byteSize);
  byteSize += bLen;
  if (getVariableValueLength(byteSize) != bLen) {
    byteSize += 1;
    bLen += 1;
  }

  if (bLen < 9) {
    // can only use compact notation if total byte length is at most 8 bytes
    // long
    _start[pos] = (isArray ? 0x13 : 0x14);
    ValueLength targetPos = 1 + bLen;

    if (_pos > (pos + 9)) {
      ValueLength len = _pos - (pos + 9);
      memmove(_start + pos + targetPos, _start + pos + 9, checkOverflow(len));
    }

    // store byte length
    VELOCYPACK_ASSERT(byteSize > 0);
    storeVariableValueLength<false>(_start + pos + 1, byteSize);

    // need additional memory for storing the number of values
    if (nLen > 8 - bLen) {
      reserve(nLen);
    }
    storeVariableValueLength<true>(_start + pos + byteSize - 1,
                                   static_cast<ValueLength>(n));

    rollback(8);
    advance(nLen + bLen);

    closeLevel();

#ifdef VELOCYPACK_DEBUG
    VELOCYPACK_ASSERT(_start[pos] == (isArray ? 0x13 : 0x14));
    VELOCYPACK_ASSERT(byteSize ==
                      readVariableValueLength<false>(_start + pos + 1));
    VELOCYPACK_ASSERT(isArray || _start[pos + 1 + bLen] != 0x00);
    VELOCYPACK_ASSERT(
        n == readVariableValueLength<true>(_start + pos + byteSize - 1));
#endif
    return true;
  }
  return false;
}

Builder& Builder::closeArray(ValueLength pos,
                             std::vector<ValueLength>::iterator indexStart,
                             std::vector<ValueLength>::iterator indexEnd) {
  std::size_t const n = std::distance(indexStart, indexEnd);
  VELOCYPACK_ASSERT(n > 0);

  bool needIndexTable = true;
  bool needNrSubs = true;

  if (n == 1) {
    // just one array entry
    needIndexTable = false;
    needNrSubs = false;
  } else if ((_pos - pos) - indexStart[0] ==
             n * (indexStart[1] - indexStart[0])) {
    // In this case it could be that all entries have the same length
    // and we do not need an offset table at all:
    bool buildIndexTable = false;
    ValueLength const subLen = indexStart[1] - indexStart[0];
    if ((_pos - pos) - indexStart[n - 1] != subLen) {
      buildIndexTable = true;
    } else {
      for (std::size_t i = 1; i < n - 1; i++) {
        if (indexStart[i + 1] - indexStart[i] != subLen) {
          // different lengths
          buildIndexTable = true;
          break;
        }
      }
    }

    if (!buildIndexTable) {
      needIndexTable = false;
      needNrSubs = false;
    }
  }

  VELOCYPACK_ASSERT(needIndexTable == needNrSubs);

  // First determine byte length and its format:
  unsigned int offsetSize;
  // can be 1, 2, 4 or 8 for the byte width of the offsets,
  // the byte length and the number of subvalues:
  bool allowMemmove =
      ::isAllowedToMemmove(options, _start + pos, indexStart, indexEnd, 1);
  if (_pos - pos + (needIndexTable ? n : 0) -
          (allowMemmove ? (needNrSubs ? 6 : 7) : 0) <=
      0xff) {
    // We have so far used _pos - pos bytes, including the reserved 8
    // bytes for byte length and number of subvalues. In the 1-byte number
    // case we would win back 6 bytes but would need one byte per subvalue
    // for the index table
    offsetSize = 1;
  } else {
    allowMemmove =
        ::isAllowedToMemmove(options, _start + pos, indexStart, indexEnd, 2);
    if (_pos - pos + (needIndexTable ? 2 * n : 0) -
            (allowMemmove ? (needNrSubs ? 4 : 6) : 0) <=
        0xffff) {
      offsetSize = 2;
    } else {
      allowMemmove = false;
      if (_pos - pos + (needIndexTable ? 4 * n : 0) <= 0xffffffffu) {
        offsetSize = 4;
      } else {
        offsetSize = 8;
      }
    }
  }

  VELOCYPACK_ASSERT(offsetSize == 1 || offsetSize == 2 || offsetSize == 4 ||
                    offsetSize == 8);
  VELOCYPACK_ASSERT(!allowMemmove || offsetSize == 1 || offsetSize == 2);

  if (offsetSize < 8 && !needIndexTable &&
      options->paddingBehavior == Options::PaddingBehavior::UsePadding) {
    // if we are allowed to use padding, we will pad to 8 bytes anyway. as we
    // are not using an index table, we can also use type 0x05 for all Arrays
    // without making things worse space-wise
    offsetSize = 8;
    allowMemmove = false;
  }

  // fix head byte
  _start[pos] = ::determineArrayType(needIndexTable, offsetSize);

  // Maybe we need to move down data:
  if (allowMemmove) {
    // check if one of the first entries in the array is ValueType::None
    // (0x00). in this case, we could not distinguish between a None (0x00)
    // and the optional padding. so we must prevent the memmove here
    ValueLength targetPos = 1 + 2 * offsetSize;
    if (!needIndexTable) {
      targetPos -= offsetSize;
    }
    if (_pos > (pos + 9)) {
      ValueLength len = _pos - (pos + 9);
      memmove(_start + pos + targetPos, _start + pos + 9, checkOverflow(len));
    }
    ValueLength const diff = 9 - targetPos;
    rollback(diff);
    if (needIndexTable) {
      for (std::size_t i = 0; i < n; i++) {
        indexStart[i] -= diff;
      }
    }  // Note: if !needIndexTable the index array is now wrong!
  }

  // Now build the table:
  if (needIndexTable) {
    reserve(offsetSize * n + (offsetSize == 8 ? 8 : 0));
    ValueLength tableBase = _pos;
    advance(offsetSize * n);
    for (std::size_t i = 0; i < n; i++) {
      uint64_t x = indexStart[i];
      for (std::size_t j = 0; j < offsetSize; j++) {
        _start[tableBase + offsetSize * i + j] = x & 0xff;
        x >>= 8;
      }
    }
  }

  // Finally fix the byte width at the end:
  if (offsetSize == 8 && needNrSubs) {
    reserve(8);
    appendLengthUnchecked<8>(n);
  }

  // Fix the byte length in the beginning:
  ValueLength x = _pos - pos;
  for (unsigned int i = 1; i <= offsetSize; i++) {
    _start[pos + i] = x & 0xff;
    x >>= 8;
  }

  if (offsetSize < 8 && needNrSubs) {
    x = n;
    for (unsigned int i = offsetSize + 1; i <= 2 * offsetSize; i++) {
      _start[pos + i] = x & 0xff;
      x >>= 8;
    }
  }

  // Now the array or object is complete, we pop a ValueLength
  // off the _stack:
  closeLevel();
  return *this;
}

Builder& Builder::close() {
  if (VELOCYPACK_UNLIKELY(isClosed())) {
    throw Exception(Exception::BuilderNeedOpenCompound);
  }
  VELOCYPACK_ASSERT(!_stack.empty());
  ValueLength const pos = _stack.back().startPos;
  ValueLength const indexStartPos = _stack.back().indexStartPos;
  uint8_t const head = _start[pos];

  VELOCYPACK_ASSERT(head == 0x06 || head == 0x0b || head == 0x13 ||
                    head == 0x14);

  bool const isArray = (head == 0x06 || head == 0x13);
  std::vector<ValueLength>::iterator indexStart =
      _indexes.begin() + indexStartPos;
  std::vector<ValueLength>::iterator indexEnd = _indexes.end();
  ValueLength const n = std::distance(indexStart, indexEnd);

  if (n == 0) {
    closeEmptyArrayOrObject(pos, isArray);
    return *this;
  }

  // From now on index.size() > 0
  VELOCYPACK_ASSERT(n > 0);

  // check if we can use the compact Array / Object format
  if (head == 0x13 || head == 0x14 ||
      (head == 0x06 && options->buildUnindexedArrays) ||
      (head == 0x0b && (options->buildUnindexedObjects || n == 1))) {
    if (closeCompactArrayOrObject(pos, isArray, indexStart, indexEnd)) {
      // And, if desired, check attribute uniqueness:
      if ((head == 0x0b || head == 0x14) && options->checkAttributeUniqueness &&
          n > 1 && !checkAttributeUniqueness(Slice(_start + pos))) {
        // duplicate attribute name!
        throw Exception(Exception::DuplicateAttributeName);
      }
      return *this;
    }
    // This might fall through, if closeCompactArrayOrObject gave up!
  }

  if (isArray) {
    closeArray(pos, _indexes.begin() + indexStartPos, _indexes.end());
    return *this;
  }

  // from here on we are sure that we are dealing with Object types only.

  // fix head byte in case a compact Array / Object was originally requested
  _start[pos] = 0x0b;

  // First determine byte length and its format:
  unsigned int offsetSize = 8;
  // can be 1, 2, 4 or 8 for the byte width of the offsets,
  // the byte length and the number of subvalues:
  if (_pos - pos + n - 6 + effectivePaddingForOneByteMembers() <= 0xff) {
    // We have so far used _pos - pos bytes, including the reserved 8
    // bytes for byte length and number of subvalues. In the 1-byte number
    // case we would win back 6 bytes but would need one byte per subvalue
    // for the index table
    offsetSize = 1;
    // One could move down things in the offsetSize == 2 case as well,
    // since we only need 4 bytes in the beginning. However, saving these
    // 4 bytes has been sacrificed on the Altar of Performance.
  } else if (_pos - pos + 2 * n + effectivePaddingForTwoByteMembers() <=
             0xffff) {
    offsetSize = 2;
  } else if (_pos - pos + 4 * n <= 0xffffffffu) {
    offsetSize = 4;
  }

  if (offsetSize < 4 &&
      (options->paddingBehavior == Options::PaddingBehavior::NoPadding ||
       (offsetSize == 1 &&
        options->paddingBehavior == Options::PaddingBehavior::Flexible))) {
    // Maybe we need to move down data:
    ValueLength targetPos = 1 + 2 * offsetSize;
    if (_pos > (pos + 9)) {
      ValueLength len = _pos - (pos + 9);
      memmove(_start + pos + targetPos, _start + pos + 9, checkOverflow(len));
    }
    ValueLength const diff = 9 - targetPos;
    rollback(diff);
    for (std::size_t i = 0; i < n; i++) {
      indexStart[i] -= diff;
    }
  }

  // Now build the table:
  reserve(offsetSize * n + (offsetSize == 8 ? 8 : 0));
  ValueLength tableBase = _pos;
  advance(offsetSize * n);
  // Object
  if (n >= 2) {
    sortObjectIndex(_start + pos, indexStart, indexEnd);
  }
  for (std::size_t i = 0; i < n; ++i) {
    uint64_t x = indexStart[i];
    for (std::size_t j = 0; j < offsetSize; ++j) {
      _start[tableBase + offsetSize * i + j] = x & 0xff;
      x >>= 8;
    }
  }
  // Finally fix the byte width in the type byte:
  if (offsetSize > 1) {
    if (offsetSize == 2) {
      _start[pos] += 1;
    } else if (offsetSize == 4) {
      _start[pos] += 2;
    } else {  // offsetSize == 8
      _start[pos] += 3;
      // write number of items
      reserve(8);
      appendLengthUnchecked<8>(n);
    }
  }

  // Fix the byte length in the beginning:
  ValueLength const byteLength = _pos - pos;
  ValueLength x = byteLength;
  for (unsigned int i = 1; i <= offsetSize; i++) {
    _start[pos + i] = x & 0xff;
    x >>= 8;
  }

  if (offsetSize < 8) {
    ValueLength x = n;
    for (unsigned int i = offsetSize + 1; i <= 2 * offsetSize; i++) {
      _start[pos + i] = x & 0xff;
      x >>= 8;
    }
  }

#ifdef VELOCYPACK_DEBUG
  // make sure byte size and number of items are actually written correctly
  if (offsetSize == 1) {
    VELOCYPACK_ASSERT(_start[pos] == 0x0b);
    // read byteLength
    uint64_t v = readIntegerFixed<uint64_t, 1>(_start + pos + 1);
    VELOCYPACK_ASSERT(byteLength == v);
    // read byteLength n
    v = readIntegerFixed<uint64_t, 1>(_start + pos + 1 + offsetSize);
    VELOCYPACK_ASSERT(n == v);
  } else if (offsetSize == 2) {
    VELOCYPACK_ASSERT(_start[pos] == 0x0c);
    // read byteLength
    uint64_t v = readIntegerFixed<uint64_t, 2>(_start + pos + 1);
    VELOCYPACK_ASSERT(byteLength == v);
    // read byteLength n
    v = readIntegerFixed<uint64_t, 2>(_start + pos + 1 + offsetSize);
    VELOCYPACK_ASSERT(n == v);
  } else if (offsetSize == 4) {
    VELOCYPACK_ASSERT(_start[pos] == 0x0d);
    // read byteLength
    uint64_t v = readIntegerFixed<uint64_t, 4>(_start + pos + 1);
    VELOCYPACK_ASSERT(byteLength == v);
    // read byteLength n
    v = readIntegerFixed<uint64_t, 4>(_start + pos + 1 + offsetSize);
    VELOCYPACK_ASSERT(n == v);
  } else if (offsetSize == 8) {
    VELOCYPACK_ASSERT(_start[pos] == 0x0e);
    uint64_t v = readIntegerFixed<uint64_t, 4>(_start + pos + 1);
    VELOCYPACK_ASSERT(byteLength == v);
  }
#endif

  // And, if desired, check attribute uniqueness:
  if (options->checkAttributeUniqueness && n > 1 &&
      !checkAttributeUniqueness(Slice(_start + pos))) {
    // duplicate attribute name!
    throw Exception(Exception::DuplicateAttributeName);
  }

  // Now the array or object is complete, we pop a ValueLength
  // off the _stack:
  closeLevel();

  return *this;
}
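// Added sketch (illustrative, not part of the original source): close()
// picks between the empty (0x01/0x0a), compact (0x13/0x14) and indexed
// (0x06../0x0b..) encodings. Assuming the standard Options flags used above,
// a compact (unindexed) array could be requested like this:
//
//   arangodb::velocypack::Options opts;
//   opts.buildUnindexedArrays = true;    // prefer the compact 0x13 encoding
//   arangodb::velocypack::Builder b(&opts);
//   b.openArray();
//   b.add(arangodb::velocypack::Value(1));
//   b.add(arangodb::velocypack::Value(2));
//   b.close();                           // stays compact if the byte length fits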

// checks whether an Object value has a specific key attribute
bool Builder::hasKey(std::string_view key) const {
  return !getKey(key).isNone();
}

// return the value for a specific key of an Object value
Slice Builder::getKey(std::string_view key) const {
  if (VELOCYPACK_UNLIKELY(_stack.empty())) {
    throw Exception(Exception::BuilderNeedOpenObject);
  }
  VELOCYPACK_ASSERT(!_stack.empty());
  ValueLength const pos = _stack.back().startPos;
  ValueLength const indexStartPos = _stack.back().indexStartPos;
  if (VELOCYPACK_UNLIKELY(_start[pos] != 0x0b && _start[pos] != 0x14)) {
    throw Exception(Exception::BuilderNeedOpenObject);
  }
  std::vector<ValueLength>::const_iterator indexStart =
      _indexes.begin() + indexStartPos;
  std::vector<ValueLength>::const_iterator indexEnd = _indexes.end();
  while (indexStart != indexEnd) {
    Slice s(_start + pos + *indexStart);
    if (s.makeKey().isEqualString(key)) {
      return Slice(s.start() + s.byteSize());
    }
    ++indexStart;
  }
  return Slice();
}
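// Added sketch (illustrative, not part of the original source): hasKey() and
// getKey() inspect the object that is currently open on the Builder, i.e.
// before close() has been called:
//
//   arangodb::velocypack::Builder b;
//   b.openObject();
//   b.add("id", arangodb::velocypack::Value(1));
//   if (!b.hasKey("name")) {
//     b.add("name", arangodb::velocypack::Value("unknown"));
//   }
//   arangodb::velocypack::Slice id = b.getKey("id");  // points into the Builder
//   b.close();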

void Builder::appendTag(uint64_t tag) {
  if (options->disallowTags) {
    // Tagged values explicitly disallowed
    throw Exception(Exception::BuilderTagsDisallowed);
  }
  if (tag <= 255) {
    reserve(1 + 1);
    appendByte(0xee);
    appendLengthUnchecked<1>(tag);
  } else {
    reserve(1 + 8);
    appendByte(0xef);
    appendLengthUnchecked<8>(tag);
  }
}

uint8_t* Builder::set(Value const& item) {
  auto const oldPos = _pos;
  auto ctype = item.cType();

  checkKeyHasValidType(item.valueType() == ValueType::String ||
                       item.valueType() == ValueType::UInt);

  // This method builds a single further VPack item at the current
  // append position. If this is an array or object, then an index
  // table is created and a new ValueLength is pushed onto the stack.
  switch (item.valueType()) {
    case ValueType::Null: {
      appendByte(0x18);
      break;
    }
    case ValueType::Bool: {
      if (VELOCYPACK_UNLIKELY(ctype != Value::CType::Bool)) {
        throw Exception(Exception::BuilderUnexpectedValue,
                        "Must give bool for ValueType::Bool");
      }
      appendByte(item.getBool() ? 0x1a : 0x19);
      break;
    }
    case ValueType::Double: {
      static_assert(sizeof(double) == sizeof(uint64_t),
                    "size of double is not 8 bytes");
      double v = 0.0;
      uint64_t x;
      switch (ctype) {
        case Value::CType::Double:
          v = item.getDouble();
          break;
        case Value::CType::Int64:
          v = static_cast<double>(item.getInt64());
          break;
        case Value::CType::UInt64:
          v = static_cast<double>(item.getUInt64());
          break;
        default:
          throw Exception(Exception::BuilderUnexpectedValue,
                          "Must give number for ValueType::Double");
      }
      reserve(1 + sizeof(double));
      appendByteUnchecked(0x1b);
      std::memcpy(&x, &v, sizeof(double));
      appendLengthUnchecked<sizeof(double)>(x);
      break;
    }
    case ValueType::SmallInt: {
      int64_t vv = 0;
      switch (ctype) {
        case Value::CType::Double:
          vv = static_cast<int64_t>(item.getDouble());
          break;
        case Value::CType::Int64:
          vv = item.getInt64();
          break;
        case Value::CType::UInt64:
          vv = static_cast<int64_t>(item.getUInt64());
          break;
        default:
          throw Exception(Exception::BuilderUnexpectedValue,
                          "Must give number for ValueType::SmallInt");
      }
      if (VELOCYPACK_UNLIKELY(vv < -6 || vv > 9)) {
        throw Exception(Exception::NumberOutOfRange,
                        "Number out of range of ValueType::SmallInt");
      }
      if (vv >= 0) {
        appendByte(static_cast<uint8_t>(vv + 0x30));
      } else {
        appendByte(static_cast<uint8_t>(vv + 0x40));
      }
      break;
    }
    case ValueType::Int: {
      int64_t v;
      switch (ctype) {
        case Value::CType::Double:
          v = static_cast<int64_t>(item.getDouble());
          break;
        case Value::CType::Int64:
          v = item.getInt64();
          break;
        case Value::CType::UInt64:
          v = toInt64(item.getUInt64());
          break;
        default:
          throw Exception(Exception::BuilderUnexpectedValue,
                          "Must give number for ValueType::Int");
      }
      addInt(v);
      break;
    }
    case ValueType::UInt: {
      uint64_t v = 0;
      switch (ctype) {
        case Value::CType::Double:
          if (VELOCYPACK_UNLIKELY(item.getDouble() < 0.0)) {
            throw Exception(
                Exception::BuilderUnexpectedValue,
                "Must give non-negative number for ValueType::UInt");
          }
          v = static_cast<uint64_t>(item.getDouble());
          break;
        case Value::CType::Int64:
          if (VELOCYPACK_UNLIKELY(item.getInt64() < 0)) {
            throw Exception(
                Exception::BuilderUnexpectedValue,
                "Must give non-negative number for ValueType::UInt");
          }
          v = static_cast<uint64_t>(item.getInt64());
          break;
        case Value::CType::UInt64:
          v = item.getUInt64();
          break;
        default:
          throw Exception(Exception::BuilderUnexpectedValue,
                          "Must give number for ValueType::UInt");
      }
      addUInt(v);
      break;
    }
    case ValueType::String: {
      char const* p;
      std::size_t size;
      if (ctype == Value::CType::String) {
        std::string const* s = item.getString();
        size = s->size();
        p = s->data();
      } else if (ctype == Value::CType::CharPtr) {
        p = item.getCharPtr();
        size = strlen(p);
      } else if (ctype == Value::CType::StringView) {
        std::string_view const* sv = item.getStringView();
        size = sv->size();
        p = sv->data();
      } else {
        throw Exception(
            Exception::BuilderUnexpectedValue,
            "Must give a string or char const* for ValueType::String");
      }
      if (size <= 126) {
        // short string
        reserve(1 + size);
        appendByteUnchecked(static_cast<uint8_t>(0x40 + size));
      } else {
        // long string
        reserve(1 + 8 + size);
        appendByteUnchecked(0xbf);
        appendLengthUnchecked<8>(size);
      }
      if (size != 0) {
        VELOCYPACK_ASSERT(p != nullptr);
        std::memcpy(_start + _pos, p, size);
        advance(size);
      }
      break;
    }
    case ValueType::Array: {
      addArray(item.unindexed());
      break;
    }
    case ValueType::Object: {
      addObject(item.unindexed());
      break;
    }
    case ValueType::UTCDate: {
      int64_t v;
      switch (ctype) {
        case Value::CType::Double:
          v = static_cast<int64_t>(item.getDouble());
          break;
        case Value::CType::Int64:
          v = item.getInt64();
          break;
        case Value::CType::UInt64:
          v = toInt64(item.getUInt64());
          break;
        default:
          throw Exception(Exception::BuilderUnexpectedValue,
                          "Must give number for ValueType::UTCDate");
      }
      addUTCDate(v);
      break;
    }
    case ValueType::Binary: {
      if (VELOCYPACK_UNLIKELY(ctype != Value::CType::String &&
                              ctype != Value::CType::CharPtr &&
                              ctype != Value::CType::StringView)) {
        throw Exception(Exception::BuilderUnexpectedValue,
                        "Must provide std::string, std::string_view or char "
                        "const* for ValueType::Binary");
      }
      char const* p;
      ValueLength size;
      if (ctype == Value::CType::String) {
        p = item.getString()->data();
        size = item.getString()->size();
      } else if (ctype == Value::CType::StringView) {
        p = item.getStringView()->data();
        size = item.getStringView()->size();
      } else {
        p = item.getCharPtr();
        size = strlen(p);
      }
      appendUInt(size, 0xbf);
      if (size != 0) {
        reserve(size);
        VELOCYPACK_ASSERT(p != nullptr);
        std::memcpy(_start + _pos, p, checkOverflow(size));
        advance(size);
      }
      break;
    }
    case ValueType::External: {
      if (options->disallowExternals) {
        // External values explicitly disallowed as a security
        // precaution
        throw Exception(Exception::BuilderExternalsDisallowed);
      }
      if (VELOCYPACK_UNLIKELY(ctype != Value::CType::VoidPtr)) {
        throw Exception(Exception::BuilderUnexpectedValue,
                        "Must give void pointer for ValueType::External");
      }
      reserve(1 + sizeof(void*));
      // store pointer. this doesn't need to be portable
      appendByteUnchecked(0x1d);
      void const* value = item.getExternal();
      std::memcpy(_start + _pos, &value, sizeof(void*));
      advance(sizeof(void*));
      break;
    }
    case ValueType::Illegal: {
      appendByte(0x17);
      break;
    }
    case ValueType::MinKey: {
      appendByte(0x1e);
      break;
    }
    case ValueType::MaxKey: {
      appendByte(0x1f);
      break;
    }
    case ValueType::BCD: {
      throw Exception(Exception::NotImplemented);
    }
    case ValueType::Tagged: {
      throw Exception(Exception::NotImplemented);
    }
    case ValueType::Custom: {
      if (options->disallowCustom) {
        // Custom values explicitly disallowed as a security precaution
        throw Exception(Exception::BuilderCustomDisallowed);
      }
      throw Exception(Exception::BuilderUnexpectedType,
                      "Cannot set a ValueType::Custom with this method");
    }
    case ValueType::None: {
      throw Exception(Exception::BuilderUnexpectedType,
                      "Cannot set a ValueType::None");
    }
  }
  return _start + oldPos;
}
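// Added sketch (illustrative, not part of the original source): set(Value)
// is what the add() overloads ultimately call. A few of the cases handled
// above, assuming the usual Value constructors and a Builder b as in the
// earlier sketches:
//
//   b.add(Value(ValueType::Null));          // 0x18
//   b.add(Value(true));                     // 0x1a
//   b.add(Value(1.5));                      // 0x1b followed by 8 bytes of IEEE double
//   b.add(Value(7, ValueType::SmallInt));   // single byte 0x37 (0x30 + 7)
//   b.add(Value("abc"));                    // 0x43 head byte plus the three characters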

uint8_t* Builder::set(Slice const& item) {
  checkKeyHasValidType(item);

  if (VELOCYPACK_UNLIKELY(options->disallowCustom && item.isCustom())) {
    // Custom values explicitly disallowed as a security precaution
    throw Exception(Exception::BuilderCustomDisallowed);
  }

  ValueLength const l = item.byteSize();
  if (l != 0) {
    reserve(l);
    VELOCYPACK_ASSERT(item.start() != nullptr);
    std::memcpy(_start + _pos, item.start(), checkOverflow(l));
    advance(l);
  }
  return _start + _pos - l;
}

uint8_t* Builder::set(ValuePair const& pair) {
  // This method builds a single further VPack item at the current
  // append position. This is the case for ValueType::String,
  // ValueType::Binary, or ValueType::Custom, which can be built
  // with two pieces of information

  auto const oldPos = _pos;

  checkKeyHasValidType(pair.valueType() == ValueType::String ||
                       pair.valueType() == ValueType::UInt);

  if (pair.valueType() == ValueType::String) {
    uint64_t size = pair.getSize();
    if (size > 126) {
      // long string
      reserve(1 + 8 + size);
      appendByteUnchecked(0xbf);
      appendLengthUnchecked<8>(size);
    } else {
      // short string
      reserve(1 + size);
      appendByteUnchecked(static_cast<uint8_t>(0x40 + size));
    }
    if (size != 0) {
      VELOCYPACK_ASSERT(pair.getStart() != nullptr);
      std::memcpy(_start + _pos, pair.getStart(), checkOverflow(size));
      advance(size);
    }
    return _start + oldPos;
  } else if (pair.valueType() == ValueType::Binary) {
    uint64_t v = pair.getSize();
    reserve(9 + v);
    appendUInt(v, 0xbf);
    if (v != 0) {
      VELOCYPACK_ASSERT(pair.getStart() != nullptr);
      std::memcpy(_start + _pos, pair.getStart(), checkOverflow(v));
      advance(v);
    }
    return _start + oldPos;
  } else if (pair.valueType() == ValueType::Custom) {
    if (options->disallowCustom) {
      // Custom values explicitly disallowed as a security precaution
      throw Exception(Exception::BuilderCustomDisallowed);
    }
    // We only reserve space here, the caller has to fill in the custom type
    uint64_t size = pair.getSize();
    reserve(size);
    uint8_t const* p = pair.getStart();
    if (p != nullptr) {
      std::memcpy(_start + _pos, p, checkOverflow(size));
    }
    advance(size);
    return _start + _pos - size;
  }
  throw Exception(Exception::BuilderUnexpectedType,
                  "Only ValueType::Binary, ValueType::String and "
                  "ValueType::Custom are valid for ValuePair argument");
}
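// Added sketch (illustrative, not part of the original source): ValuePair is
// used when the payload bytes already live elsewhere in memory, e.g. for
// Binary values; the assumed constructor takes a pointer, a size and a type:
//
//   uint8_t const blob[] = {0xde, 0xad, 0xbe, 0xef};
//   b.add(ValuePair(blob, sizeof(blob), ValueType::Binary));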

void Builder::cleanupAdd() noexcept {
  VELOCYPACK_ASSERT(!_stack.empty());
  VELOCYPACK_ASSERT(!_indexes.empty());
  _indexes.pop_back();
}

void Builder::reportAdd() {
  VELOCYPACK_ASSERT(!_stack.empty());
  if (_indexes.capacity() == 0) {
    // make an initial reservation for several items at
    // a time, in order to save frequent reallocations for
    // the first few attributes
    _indexes.reserve(8);
  }
  _indexes.push_back(_pos - _stack.back().startPos);
}

void Builder::closeLevel() noexcept {
  VELOCYPACK_ASSERT(!_stack.empty());
  ValueLength const indexStartPos = _stack.back().indexStartPos;
  _stack.pop_back();
  _indexes.erase(_indexes.begin() + indexStartPos, _indexes.end());
}

bool Builder::checkAttributeUniqueness(Slice obj) const {
  VELOCYPACK_ASSERT(options->checkAttributeUniqueness == true);
  VELOCYPACK_ASSERT(obj.isObject());
  VELOCYPACK_ASSERT(obj.length() >= 2);

  if (obj.isSorted()) {
    // object attributes are sorted
    return checkAttributeUniquenessSorted(obj);
  }

  return checkAttributeUniquenessUnsorted(obj);
}

bool Builder::checkAttributeUniquenessSorted(Slice obj) const {
  ObjectIterator it(obj, false);

  // fetch initial key
  Slice previous = it.key(true);
  ValueLength len;
  char const* p = previous.getString(len);

  // advance to next key already
  it.next();

  do {
    Slice const current = it.key(true);
    VELOCYPACK_ASSERT(current.isString());

    ValueLength len2;
    char const* q = current.getStringUnchecked(len2);

    if (len == len2 && std::memcmp(p, q, checkOverflow(len2)) == 0) {
      // identical key
      return false;
    }
    // re-use already calculated values for next round
    len = len2;
    p = q;
    it.next();
  } while (it.valid());

  // all keys unique
  return true;
}

bool Builder::checkAttributeUniquenessUnsorted(Slice obj) const {
  // cutoff value for linear attribute uniqueness scan
  // unsorted objects with this amount of attributes (or less) will
  // be validated using a non-allocating scan over the attributes
  // objects with more attributes will use a validation routine that
  // will use an std::unordered_set for O(1) lookups but with heap
  // allocations
  ObjectIterator it(obj, true);

  if (it.size() <= ::linearAttributeUniquenessCutoff) {
    return ::checkAttributeUniquenessUnsortedBrute(it);
  }
  return ::checkAttributeUniquenessUnsortedSet(it);
}

// Add all subkeys and subvalues into an object from an ObjectIterator
// and leave the object open intentionally
uint8_t* Builder::add(ObjectIterator&& sub) {
  if (VELOCYPACK_UNLIKELY(_stack.empty())) {
    throw Exception(Exception::BuilderNeedOpenObject);
  }
  ValueLength const pos = _stack.back().startPos;
  if (VELOCYPACK_UNLIKELY(_start[pos] != 0x0b && _start[pos] != 0x14)) {
    throw Exception(Exception::BuilderNeedOpenObject);
  }
  if (VELOCYPACK_UNLIKELY(_keyWritten)) {
    throw Exception(Exception::BuilderKeyAlreadyWritten);
  }
  auto const oldPos = _pos;
  while (sub.valid()) {
    auto current = (*sub);
    add(current.key);
    add(current.value);
    sub.next();
  }
  return _start + oldPos;
}

// Add all subvalues into an array from an ArrayIterator
// and leave the array open intentionally
uint8_t* Builder::add(ArrayIterator&& sub) {
  if (VELOCYPACK_UNLIKELY(_stack.empty())) {
    throw Exception(Exception::BuilderNeedOpenArray);
  }
  ValueLength const pos = _stack.back().startPos;
  if (VELOCYPACK_UNLIKELY(_start[pos] != 0x06 && _start[pos] != 0x13)) {
    throw Exception(Exception::BuilderNeedOpenArray);
  }
  auto const oldPos = _pos;
  while (sub.valid()) {
    add(*sub);
    sub.next();
  }
  return _start + oldPos;
}
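// Added sketch (illustrative, not part of the original source): the iterator
// overloads above splice the contents of an existing Slice into the compound
// value that is currently open, and intentionally leave it open:
//
//   arangodb::velocypack::Builder b;
//   b.openArray();
//   b.add(arangodb::velocypack::ArrayIterator(someArraySlice));  // someArraySlice is a
//                                                                // hypothetical existing Slice
//   b.add(arangodb::velocypack::Value("one more entry"));
//   b.close();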

ValueLength Builder::effectivePaddingForOneByteMembers() const noexcept {
  // 8 bytes - object length (1 byte) - number of items (1 byte) = 6 bytes
  return (options->paddingBehavior == Options::PaddingBehavior::UsePadding ? 6
                                                                           : 0);
}

ValueLength Builder::effectivePaddingForTwoByteMembers() const noexcept {
  // 8 bytes - object length (2 bytes) - number of items (2 bytes) = 4 bytes
  return (options->paddingBehavior == Options::PaddingBehavior::UsePadding ? 4
                                                                           : 0);
}

static_assert(sizeof(double) == 8, "double is not 8 bytes");