
tarantool / luajit / 12412391679

19 Dec 2024 12:05PM UTC coverage: 92.962% (+0.04%) from 92.921%

Build: push (github), committed by Buristan

test: fix flaky fix-slots-overflow-for-varg-record

The aforementioned test is flaky when it is run by Tarantool, since the
necessary trace isn't compiled due to hotcount collisions. This patch
fixes the flakiness by adding an additional reset of the hot counters.
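For context, a minimal sketch of how such a reset is typically expressed in a tarantool/luajit Lua test, assuming that re-applying the hotloop parameter via jit.opt.start() reinitializes the hot counters; the loop and the option value below are illustrative assumptions, not the actual patch.

-- Hypothetical sketch, not the actual patch.
-- Re-setting the hotloop parameter is assumed to reinitialize the hot
-- counters, so a hotcount collision with code executed earlier in the
-- same process can no longer keep the loop below from reaching the
-- recording threshold.
jit.opt.start('hotloop=1')

local recorded
for _ = 1, 3 do
  recorded = true  -- Loop body the test expects to be compiled as a trace.
end
assert(recorded)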

5695 of 6033 branches covered (94.4%). Branch coverage is included in the aggregate %.

21712 of 23449 relevant lines covered (92.59%), averaging 2992781.39 hits per line.

Source File: /src/lj_gc.c (99.16% covered)

/*
** Garbage collector.
** Copyright (C) 2005-2017 Mike Pall. See Copyright Notice in luajit.h
**
** Major portions taken verbatim or adapted from the Lua interpreter.
** Copyright (C) 1994-2008 Lua.org, PUC-Rio. See Copyright Notice in lua.h
*/

#define lj_gc_c
#define LUA_CORE

#include "lj_obj.h"
#include "lj_gc.h"
#include "lj_err.h"
#include "lj_buf.h"
#include "lj_str.h"
#include "lj_tab.h"
#include "lj_func.h"
#include "lj_udata.h"
#include "lj_meta.h"
#include "lj_state.h"
#include "lj_frame.h"
#if LJ_HASFFI
#include "lj_ctype.h"
#include "lj_cdata.h"
#endif
#include "lj_trace.h"
#include "lj_dispatch.h"
#include "lj_vm.h"
#include "lj_vmevent.h"

#define GCSTEPSIZE        1024u
#define GCSWEEPMAX        40
#define GCSWEEPCOST        10
#define GCFINALIZECOST        100

/* Macros to set GCobj colors and flags. */
#define white2gray(x)                ((x)->gch.marked &= (uint8_t)~LJ_GC_WHITES)
#define gray2black(x)                ((x)->gch.marked |= LJ_GC_BLACK)
#define isfinalized(u)                ((u)->marked & LJ_GC_FINALIZED)

/* -- Mark phase ---------------------------------------------------------- */

/* Mark a TValue (if needed). */
#define gc_marktv(g, tv) \
  { lj_assertG(!tvisgcv(tv) || (~itype(tv) == gcval(tv)->gch.gct), \
               "TValue and GC type mismatch"); \
    if (tviswhite(tv)) gc_mark(g, gcV(tv)); }

/* Mark a GCobj (if needed). */
#define gc_markobj(g, o) \
  { if (iswhite(obj2gco(o))) gc_mark(g, obj2gco(o)); }

/* Mark a string object. */
#define gc_mark_str(s)                ((s)->marked &= (uint8_t)~LJ_GC_WHITES)

/* Mark a white GCobj. */
static void gc_mark(global_State *g, GCobj *o)
{
  int gct = o->gch.gct;
  lj_assertG(iswhite(o), "mark of non-white object");
  lj_assertG(!isdead(g, o), "mark of dead object");
  white2gray(o);
  if (LJ_UNLIKELY(gct == ~LJ_TUDATA)) {
    GCtab *mt = tabref(gco2ud(o)->metatable);
    gray2black(o);  /* Userdata are never gray. */
    if (mt) gc_markobj(g, mt);
    gc_markobj(g, tabref(gco2ud(o)->env));
  } else if (LJ_UNLIKELY(gct == ~LJ_TUPVAL)) {
    GCupval *uv = gco2uv(o);
    gc_marktv(g, uvval(uv));
    if (uv->closed)
      gray2black(o);  /* Closed upvalues are never gray. */
  } else if (gct != ~LJ_TSTR && gct != ~LJ_TCDATA) {
    lj_assertG(gct == ~LJ_TFUNC || gct == ~LJ_TTAB ||
               gct == ~LJ_TTHREAD || gct == ~LJ_TPROTO || gct == ~LJ_TTRACE,
               "bad GC type %d", gct);
    setgcrefr(o->gch.gclist, g->gc.gray);
    setgcref(g->gc.gray, o);
  }
}

/* Mark GC roots. */
static void gc_mark_gcroot(global_State *g)
{
  ptrdiff_t i;
  for (i = 0; i < GCROOT_MAX; i++)
    if (gcref(g->gcroot[i]) != NULL)
      gc_markobj(g, gcref(g->gcroot[i]));
}

/* Start a GC cycle and mark the root set. */
static void gc_mark_start(global_State *g)
{
  setgcrefnull(g->gc.gray);
  setgcrefnull(g->gc.grayagain);
  setgcrefnull(g->gc.weak);
  gc_markobj(g, mainthread(g));
  gc_markobj(g, tabref(mainthread(g)->env));
  gc_marktv(g, &g->registrytv);
  gc_mark_gcroot(g);
  g->gc.state = GCSpropagate;
}

/* Mark open upvalues. */
static void gc_mark_uv(global_State *g)
{
  GCupval *uv;
  for (uv = uvnext(&g->uvhead); uv != &g->uvhead; uv = uvnext(uv)) {
    lj_assertG(uvprev(uvnext(uv)) == uv && uvnext(uvprev(uv)) == uv,
               "broken upvalue chain");
    if (isgray(obj2gco(uv)))
      gc_marktv(g, uvval(uv));
  }
}

/* Mark userdata in mmudata list. */
static void gc_mark_mmudata(global_State *g)
{
  GCobj *root = gcref(g->gc.mmudata);
  GCobj *u = root;
  if (u) {
    do {
      u = gcnext(u);
      makewhite(g, u);  /* Could be from previous GC. */
      gc_mark(g, u);
    } while (u != root);
  }
}

/* Separate userdata objects to be finalized to mmudata list. */
size_t lj_gc_separateudata(global_State *g, int all)
{
  size_t m = 0;
  GCRef *p = &mainthread(g)->nextgc;
  GCobj *o;
  while ((o = gcref(*p)) != NULL) {
    if (!(iswhite(o) || all) || isfinalized(gco2ud(o))) {
      p = &o->gch.nextgc;  /* Nothing to do. */
    } else if (!lj_meta_fastg(g, tabref(gco2ud(o)->metatable), MM_gc)) {
      markfinalized(o);  /* Done, as there's no __gc metamethod. */
      p = &o->gch.nextgc;
    } else {  /* Otherwise move userdata to be finalized to mmudata list. */
      m += sizeudata(gco2ud(o));
      markfinalized(o);
      *p = o->gch.nextgc;
      if (gcref(g->gc.mmudata)) {  /* Link to end of mmudata list. */
        GCobj *root = gcref(g->gc.mmudata);
        setgcrefr(o->gch.nextgc, root->gch.nextgc);
        setgcref(root->gch.nextgc, o);
        setgcref(g->gc.mmudata, o);
      } else {  /* Create circular list. */
        setgcref(o->gch.nextgc, o);
        setgcref(g->gc.mmudata, o);
      }
    }
  }
  return m;
}

/* -- Propagation phase --------------------------------------------------- */

/* Traverse a table. */
static int gc_traverse_tab(global_State *g, GCtab *t)
{
  int weak = 0;
  cTValue *mode;
  GCtab *mt = tabref(t->metatable);
  if (mt)
    gc_markobj(g, mt);
  mode = lj_meta_fastg(g, mt, MM_mode);
  if (mode && tvisstr(mode)) {  /* Valid __mode field? */
    const char *modestr = strVdata(mode);
    int c;
    while ((c = *modestr++)) {
      if (c == 'k') weak |= LJ_GC_WEAKKEY;
      else if (c == 'v') weak |= LJ_GC_WEAKVAL;
    }
    if (weak) {  /* Weak tables are cleared in the atomic phase. */
#if LJ_HASFFI
      if (gcref(g->gcroot[GCROOT_FFI_FIN]) == obj2gco(t)) {
        weak = (int)(~0u & ~LJ_GC_WEAKVAL);
      } else
#endif
      {
        t->marked = (uint8_t)((t->marked & ~LJ_GC_WEAK) | weak);
        setgcrefr(t->gclist, g->gc.weak);
        setgcref(g->gc.weak, obj2gco(t));
      }
    }
  }
  if (weak == LJ_GC_WEAK)  /* Nothing to mark if both keys/values are weak. */
    return 1;
  if (!(weak & LJ_GC_WEAKVAL)) {  /* Mark array part. */
    MSize i, asize = t->asize;
    for (i = 0; i < asize; i++)
      gc_marktv(g, arrayslot(t, i));
  }
  if (t->hmask > 0) {  /* Mark hash part. */
    Node *node = noderef(t->node);
    MSize i, hmask = t->hmask;
    for (i = 0; i <= hmask; i++) {
      Node *n = &node[i];
      if (!tvisnil(&n->val)) {  /* Mark non-empty slot. */
        lj_assertG(!tvisnil(&n->key), "mark of nil key in non-empty slot");
        if (!(weak & LJ_GC_WEAKKEY)) gc_marktv(g, &n->key);
        if (!(weak & LJ_GC_WEAKVAL)) gc_marktv(g, &n->val);
      }
    }
  }
  return weak;
}

/* Traverse a function. */
static void gc_traverse_func(global_State *g, GCfunc *fn)
{
  gc_markobj(g, tabref(fn->c.env));
  if (isluafunc(fn)) {
    uint32_t i;
    lj_assertG(fn->l.nupvalues <= funcproto(fn)->sizeuv,
               "function upvalues out of range");
    gc_markobj(g, funcproto(fn));
    for (i = 0; i < fn->l.nupvalues; i++)  /* Mark Lua function upvalues. */
      gc_markobj(g, &gcref(fn->l.uvptr[i])->uv);
  } else {
    uint32_t i;
    for (i = 0; i < fn->c.nupvalues; i++)  /* Mark C function upvalues. */
      gc_marktv(g, &fn->c.upvalue[i]);
  }
}

#if LJ_HASJIT
/* Mark a trace. */
static void gc_marktrace(global_State *g, TraceNo traceno)
{
  GCobj *o = obj2gco(traceref(G2J(g), traceno));
  lj_assertG(traceno != G2J(g)->cur.traceno, "active trace escaped");
  if (iswhite(o)) {
    white2gray(o);
    setgcrefr(o->gch.gclist, g->gc.gray);
    setgcref(g->gc.gray, o);
  }
}

/* Traverse a trace. */
static void gc_traverse_trace(global_State *g, GCtrace *T)
{
  IRRef ref;
  if (T->traceno == 0) return;
  for (ref = T->nk; ref < REF_TRUE; ref++) {
    IRIns *ir = &T->ir[ref];
    if (ir->o == IR_KGC)
      gc_markobj(g, ir_kgc(ir));
    if (irt_is64(ir->t) && ir->o != IR_KNULL)
      ref++;
  }
  if (T->link) gc_marktrace(g, T->link);
  if (T->nextroot) gc_marktrace(g, T->nextroot);
  if (T->nextside) gc_marktrace(g, T->nextside);
  gc_markobj(g, gcref(T->startpt));
}

/* The current trace is a GC root while not anchored in the prototype (yet). */
#define gc_traverse_curtrace(g)        gc_traverse_trace(g, &G2J(g)->cur)
#else
#define gc_traverse_curtrace(g)        UNUSED(g)
#endif

/* Traverse a prototype. */
static void gc_traverse_proto(global_State *g, GCproto *pt)
{
  ptrdiff_t i;
  gc_mark_str(proto_chunkname(pt));
  for (i = -(ptrdiff_t)pt->sizekgc; i < 0; i++)  /* Mark collectable consts. */
    gc_markobj(g, proto_kgc(pt, i));
#if LJ_HASJIT
  if (pt->trace) gc_marktrace(g, pt->trace);
#endif
}

/* Traverse the frame structure of a stack. */
static MSize gc_traverse_frames(global_State *g, lua_State *th)
{
  TValue *frame, *top = th->top-1, *bot = tvref(th->stack);
  /* Note: extra vararg frame not skipped, marks function twice (harmless). */
  for (frame = th->base-1; frame > bot+LJ_FR2; frame = frame_prev(frame)) {
    GCfunc *fn = frame_func(frame);
    TValue *ftop = frame;
    if (isluafunc(fn)) ftop += funcproto(fn)->framesize;
    if (ftop > top) top = ftop;
    if (!LJ_FR2) gc_markobj(g, fn);  /* Need to mark hidden function (or L). */
  }
  top++;  /* Correct bias of -1 (frame == base-1). */
  if (top > tvref(th->maxstack)) top = tvref(th->maxstack);
  return (MSize)(top - bot);  /* Return minimum needed stack size. */
}

/* Traverse a thread object. */
static void gc_traverse_thread(global_State *g, lua_State *th)
{
  TValue *o, *top = th->top;
  for (o = tvref(th->stack)+1+LJ_FR2; o < top; o++)
    gc_marktv(g, o);
  if (g->gc.state == GCSatomic) {
    top = tvref(th->stack) + th->stacksize;
    for (; o < top; o++)  /* Clear unmarked slots. */
      setnilV(o);
  }
  gc_markobj(g, tabref(th->env));
  lj_state_shrinkstack(th, gc_traverse_frames(g, th));
}

/* Propagate one gray object. Traverse it and turn it black. */
static size_t propagatemark(global_State *g)
{
  GCobj *o = gcref(g->gc.gray);
  int gct = o->gch.gct;
  lj_assertG(isgray(o), "propagation of non-gray object");
  gray2black(o);
  setgcrefr(g->gc.gray, o->gch.gclist);  /* Remove from gray list. */
  if (LJ_LIKELY(gct == ~LJ_TTAB)) {
    GCtab *t = gco2tab(o);
    if (gc_traverse_tab(g, t) > 0)
      black2gray(o);  /* Keep weak tables gray. */
    return sizeof(GCtab) + sizeof(TValue) * t->asize +
                           (t->hmask ? sizeof(Node) * (t->hmask + 1) : 0);
  } else if (LJ_LIKELY(gct == ~LJ_TFUNC)) {
    GCfunc *fn = gco2func(o);
    gc_traverse_func(g, fn);
    return isluafunc(fn) ? sizeLfunc((MSize)fn->l.nupvalues) :
                           sizeCfunc((MSize)fn->c.nupvalues);
  } else if (LJ_LIKELY(gct == ~LJ_TPROTO)) {
    GCproto *pt = gco2pt(o);
    gc_traverse_proto(g, pt);
    return pt->sizept;
  } else if (LJ_LIKELY(gct == ~LJ_TTHREAD)) {
    lua_State *th = gco2th(o);
    setgcrefr(th->gclist, g->gc.grayagain);
    setgcref(g->gc.grayagain, o);
    black2gray(o);  /* Threads are never black. */
    gc_traverse_thread(g, th);
    return sizeof(lua_State) + sizeof(TValue) * th->stacksize;
  } else {
#if LJ_HASJIT
    GCtrace *T = gco2trace(o);
    gc_traverse_trace(g, T);
    return ((sizeof(GCtrace)+7)&~7) + (T->nins-T->nk)*sizeof(IRIns) +
           T->nsnap*sizeof(SnapShot) + T->nsnapmap*sizeof(SnapEntry);
#else
    lj_assertG(0, "bad GC type %d", gct);
    return 0;
#endif
  }
}

/* Propagate all gray objects. */
static size_t gc_propagate_gray(global_State *g)
{
  size_t m = 0;
  while (gcref(g->gc.gray) != NULL)
    m += propagatemark(g);
  return m;
}

/* -- Sweep phase --------------------------------------------------------- */

/* Type of GC free functions. */
typedef void (LJ_FASTCALL *GCFreeFunc)(global_State *g, GCobj *o);

/* GC free functions for LJ_TSTR .. LJ_TUDATA. ORDER LJ_T */
static const GCFreeFunc gc_freefunc[] = {
  (GCFreeFunc)lj_str_free,
  (GCFreeFunc)lj_func_freeuv,
  (GCFreeFunc)lj_state_free,
  (GCFreeFunc)lj_func_freeproto,
  (GCFreeFunc)lj_func_free,
#if LJ_HASJIT
  (GCFreeFunc)lj_trace_free,
#else
  (GCFreeFunc)0,
#endif
#if LJ_HASFFI
  (GCFreeFunc)lj_cdata_free,
#else
  (GCFreeFunc)0,
#endif
  (GCFreeFunc)lj_tab_free,
  (GCFreeFunc)lj_udata_free
};

/* Full sweep of a GC list. */
#define gc_fullsweep(g, p)        gc_sweep(g, (p), ~(uint32_t)0)

/* Partial sweep of a GC list. */
static GCRef *gc_sweep(global_State *g, GCRef *p, uint32_t lim)
{
  /* Mask with other white and LJ_GC_FIXED. Or LJ_GC_SFIXED on shutdown. */
  int ow = otherwhite(g);
  GCobj *o;
  while ((o = gcref(*p)) != NULL && lim-- > 0) {
    if (o->gch.gct == ~LJ_TTHREAD)  /* Need to sweep open upvalues, too. */
      gc_fullsweep(g, &gco2th(o)->openupval);
    if (((o->gch.marked ^ LJ_GC_WHITES) & ow)) {  /* Black or current white? */
      lj_assertG(!isdead(g, o) || (o->gch.marked & LJ_GC_FIXED),
                 "sweep of undead object");
      makewhite(g, o);  /* Value is alive, change to the current white. */
      p = &o->gch.nextgc;
    } else {  /* Otherwise value is dead, free it. */
      lj_assertG(isdead(g, o) || ow == LJ_GC_SFIXED,
                 "sweep of unlive object");
      setgcrefr(*p, o->gch.nextgc);
      if (o == gcref(g->gc.root))
        setgcrefr(g->gc.root, o->gch.nextgc);  /* Adjust list anchor. */
      gc_freefunc[o->gch.gct - ~LJ_TSTR](g, o);
    }
  }
  return p;
}

/* Full sweep of a string chain. */
static GCRef *gc_sweep_str_chain(global_State *g, GCRef *p)
{
  /* Mask with other white and LJ_GC_FIXED. Or LJ_GC_SFIXED on shutdown. */
  int ow = otherwhite(g);
  GCobj *o;
  while ((o = gcref(*p)) != NULL) {
    if (((o->gch.marked ^ LJ_GC_WHITES) & ow)) {  /* Black or current white? */
      lj_assertG(!isdead(g, o) || (o->gch.marked & LJ_GC_FIXED),
                 "sweep of undead string");
      makewhite(g, o);  /* Value is alive, change to the current white. */
#if LUAJIT_SMART_STRINGS
      if (strsmart(&o->str)) {
        /* must match lj_str_new */
        bloomset(g->strbloom.next[0], o->str.hash >> (sizeof(o->str.hash)*8-6));
        bloomset(g->strbloom.next[1], o->str.strflags);
      }
#endif
      p = &o->gch.nextgc;
    } else {  /* Otherwise value is dead, free it. */
      lj_assertG(isdead(g, o) || ow == LJ_GC_SFIXED,
                 "sweep of unlive string");
      setgcrefr(*p, o->gch.nextgc);
      lj_str_free(g, &o->str);
    }
  }
  return p;
}

/* Check whether we can clear a key or a value slot from a table. */
static int gc_mayclear(cTValue *o, int val)
{
  if (tvisgcv(o)) {  /* Only collectable objects can be weak references. */
    if (tvisstr(o)) {  /* But strings cannot be used as weak references. */
      gc_mark_str(strV(o));  /* And need to be marked. */
      return 0;
    }
    if (iswhite(gcV(o)))
      return 1;  /* Object is about to be collected. */
    if (tvisudata(o) && val && isfinalized(udataV(o)))
      return 1;  /* Finalized userdata is dropped only from values. */
  }
  return 0;  /* Cannot clear. */
}

/* Clear collected entries from weak tables. */
static void gc_clearweak(global_State *g, GCobj *o)
{
  UNUSED(g);
  while (o) {
    GCtab *t = gco2tab(o);
    lj_assertG((t->marked & LJ_GC_WEAK), "clear of non-weak table");
    if ((t->marked & LJ_GC_WEAKVAL)) {
      MSize i, asize = t->asize;
      for (i = 0; i < asize; i++) {
        /* Clear array slot when value is about to be collected. */
        TValue *tv = arrayslot(t, i);
        if (gc_mayclear(tv, 1))
          setnilV(tv);
      }
    }
    if (t->hmask > 0) {
      Node *node = noderef(t->node);
      MSize i, hmask = t->hmask;
      for (i = 0; i <= hmask; i++) {
        Node *n = &node[i];
        /* Clear hash slot when key or value is about to be collected. */
        if (!tvisnil(&n->val) && (gc_mayclear(&n->key, 0) ||
                                  gc_mayclear(&n->val, 1)))
          setnilV(&n->val);
      }
    }
    o = gcref(t->gclist);
  }
}

/* Call a userdata or cdata finalizer. */
static void gc_call_finalizer(global_State *g, lua_State *L,
                              cTValue *mo, GCobj *o)
{
  /* Save and restore lots of state around the __gc callback. */
  uint8_t oldh = hook_save(g);
  GCSize oldt = g->gc.threshold;
  int errcode;
  TValue *top;
  lj_trace_abort(g);
  hook_entergc(g);  /* Disable hooks and new traces during __gc. */
  if (LJ_HASPROFILE && (oldh & HOOK_PROFILE)) lj_dispatch_update(g);
  g->gc.threshold = LJ_MAX_MEM;  /* Prevent GC steps. */
  top = L->top;
  copyTV(L, top++, mo);
  if (LJ_FR2) setnilV(top++);
  setgcV(L, top, o, ~o->gch.gct);
  L->top = top+1;
  errcode = lj_vm_pcall(L, top, 1+0, -1);  /* Stack: |mo|o| -> | */
  hook_restore(g, oldh);
  if (LJ_HASPROFILE && (oldh & HOOK_PROFILE)) lj_dispatch_update(g);
  g->gc.threshold = oldt;  /* Restore GC threshold. */
  if (errcode) {
    ptrdiff_t errobj = savestack(L, L->top-1);  /* Stack may be resized. */
    lj_vmevent_send(L, ERRFIN,
      copyTV(L, L->top++, restorestack(L, errobj));
    );
    L->top--;
  }
}

/* Finalize one userdata or cdata object from the mmudata list. */
static void gc_finalize(lua_State *L)
{
  global_State *g = G(L);
  GCobj *o = gcnext(gcref(g->gc.mmudata));
  cTValue *mo;
  lj_assertG(tvref(g->jit_base) == NULL, "finalizer called on trace");
  /* Unchain from list of userdata to be finalized. */
  if (o == gcref(g->gc.mmudata))
    setgcrefnull(g->gc.mmudata);
  else
    setgcrefr(gcref(g->gc.mmudata)->gch.nextgc, o->gch.nextgc);
#if LJ_HASFFI
  if (o->gch.gct == ~LJ_TCDATA) {
    TValue tmp, *tv;
    /* Add cdata back to the GC list and make it white. */
    setgcrefr(o->gch.nextgc, g->gc.root);
    setgcref(g->gc.root, o);
    makewhite(g, o);
    o->gch.marked &= (uint8_t)~LJ_GC_CDATA_FIN;
    /* Resolve finalizer. */
    setcdataV(L, &tmp, gco2cd(o));
    tv = lj_tab_set(L, tabref(g->gcroot[GCROOT_FFI_FIN]), &tmp);
    if (!tvisnil(tv)) {
      copyTV(L, &tmp, tv);
      setnilV(tv);  /* Clear entry in finalizer table. */
      gc_call_finalizer(g, L, &tmp, o);
    }
    return;
  }
#endif
  /* Add userdata back to the main userdata list and make it white. */
  setgcrefr(o->gch.nextgc, mainthread(g)->nextgc);
  setgcref(mainthread(g)->nextgc, o);
  makewhite(g, o);
  /* Resolve the __gc metamethod. */
  mo = lj_meta_fastg(g, tabref(gco2ud(o)->metatable), MM_gc);
  if (mo)
    gc_call_finalizer(g, L, mo, o);
}

/* Finalize all userdata objects from mmudata list. */
void lj_gc_finalize_udata(lua_State *L)
{
  while (gcref(G(L)->gc.mmudata) != NULL)
    gc_finalize(L);
}

#if LJ_HASFFI
/* Finalize all cdata objects from finalizer table. */
void lj_gc_finalize_cdata(lua_State *L)
{
  global_State *g = G(L);
  GCtab *t = tabref(g->gcroot[GCROOT_FFI_FIN]);
  Node *node = noderef(t->node);
  ptrdiff_t i;
  setgcrefnull(t->metatable);  /* Mark finalizer table as disabled. */
  for (i = (ptrdiff_t)t->hmask; i >= 0; i--)
    if (!tvisnil(&node[i].val) && tviscdata(&node[i].key)) {
      GCobj *o = gcV(&node[i].key);
      TValue tmp;
      makewhite(g, o);
      o->gch.marked &= (uint8_t)~LJ_GC_CDATA_FIN;
      copyTV(L, &tmp, &node[i].val);
      setnilV(&node[i].val);
      gc_call_finalizer(g, L, &tmp, o);
    }
}
#endif

/* Free all remaining GC objects. */
void lj_gc_freeall(global_State *g)
{
  MSize i, strmask;
  /* Free everything, except super-fixed objects (the main thread). */
  g->gc.currentwhite = LJ_GC_WHITES | LJ_GC_SFIXED;
  gc_fullsweep(g, &g->gc.root);
  strmask = g->strmask;
  for (i = 0; i <= strmask; i++)  /* Free all string hash chains. */
    gc_fullsweep(g, &g->strhash[i]);
}

/* -- Collector ----------------------------------------------------------- */

/* Atomic part of the GC cycle, transitioning from mark to sweep phase. */
static void atomic(global_State *g, lua_State *L)
{
  size_t udsize;

  gc_mark_uv(g);  /* Need to remark open upvalues (the thread may be dead). */
  gc_propagate_gray(g);  /* Propagate any left-overs. */

  setgcrefr(g->gc.gray, g->gc.weak);  /* Empty the list of weak tables. */
  setgcrefnull(g->gc.weak);
  lj_assertG(!iswhite(obj2gco(mainthread(g))), "main thread turned white");
  gc_markobj(g, L);  /* Mark running thread. */
  gc_traverse_curtrace(g);  /* Traverse current trace. */
  gc_mark_gcroot(g);  /* Mark GC roots (again). */
  gc_propagate_gray(g);  /* Propagate all of the above. */

  setgcrefr(g->gc.gray, g->gc.grayagain);  /* Empty the 2nd chance list. */
  setgcrefnull(g->gc.grayagain);
  gc_propagate_gray(g);  /* Propagate it. */

  udsize = lj_gc_separateudata(g, 0);  /* Separate userdata to be finalized. */
  gc_mark_mmudata(g);  /* Mark them. */
  udsize += gc_propagate_gray(g);  /* And propagate the marks. */

  /* All marking done, clear weak tables. */
  gc_clearweak(g, gcref(g->gc.weak));

  lj_buf_shrink(L, &g->tmpbuf);  /* Shrink temp buffer. */

  /* Prepare for sweep phase. */
  g->gc.currentwhite = (uint8_t)otherwhite(g);  /* Flip current white. */
  g->strempty.marked = g->gc.currentwhite;
  setmref(g->gc.sweep, &g->gc.root);
  g->gc.estimate = g->gc.total - (GCSize)udsize;  /* Initial estimate. */
}

/* GC state machine. Returns a cost estimate for each step performed. */
static size_t gc_onestep(lua_State *L)
{
  global_State *g = G(L);
  g->gc.state_count[g->gc.state]++;
  switch (g->gc.state) {
  case GCSpause:
    gc_mark_start(g);  /* Start a new GC cycle by marking all GC roots. */
    return 0;
  case GCSpropagate:
    if (gcref(g->gc.gray) != NULL)
      return propagatemark(g);  /* Propagate one gray object. */
    g->gc.state = GCSatomic;  /* End of mark phase. */
    return 0;
  case GCSatomic:
    if (tvref(g->jit_base))  /* Don't run atomic phase on trace. */
      return LJ_MAX_MEM;
    atomic(g, L);
    g->gc.state = GCSsweepstring;  /* Start of sweep phase. */
    g->gc.sweepstr = 0;
#if LUAJIT_SMART_STRINGS
    g->strbloom.next[0] = 0;
    g->strbloom.next[1] = 0;
#endif
    return 0;
  case GCSsweepstring: {
    GCSize old = g->gc.total;
    gc_sweep_str_chain(g, &g->strhash[g->gc.sweepstr++]);  /* Sweep one chain. */
    if (g->gc.sweepstr > g->strmask) {
      g->gc.state = GCSsweep;  /* All string hash chains sweeped. */
#if LUAJIT_SMART_STRINGS
      g->strbloom.cur[0] = g->strbloom.next[0];
      g->strbloom.cur[1] = g->strbloom.next[1];
#endif
    }
    lj_assertG(old >= g->gc.total, "sweep increased memory");
    g->gc.estimate -= old - g->gc.total;
    return GCSWEEPCOST;
    }
  case GCSsweep: {
    GCSize old = g->gc.total;
    setmref(g->gc.sweep, gc_sweep(g, mref(g->gc.sweep, GCRef), GCSWEEPMAX));
    lj_assertG(old >= g->gc.total, "sweep increased memory");
    g->gc.estimate -= old - g->gc.total;
    if (gcref(*mref(g->gc.sweep, GCRef)) == NULL) {
      if (g->strnum <= (g->strmask >> 2) && g->strmask > LJ_MIN_STRTAB*2-1)
        lj_str_resize(L, g->strmask >> 1);  /* Shrink string table. */
      if (gcref(g->gc.mmudata)) {  /* Need any finalizations? */
        g->gc.state = GCSfinalize;
      } else {  /* Otherwise skip this phase to help the JIT. */
        g->gc.state = GCSpause;  /* End of GC cycle. */
        g->gc.debt = 0;
      }
    }
    return GCSWEEPMAX*GCSWEEPCOST;
    }
  case GCSfinalize:
    if (gcref(g->gc.mmudata) != NULL) {
      if (tvref(g->jit_base))  /* Don't call finalizers on trace. */
        return LJ_MAX_MEM;
      gc_finalize(L);  /* Finalize one userdata object. */
      if (g->gc.estimate > GCFINALIZECOST)
        g->gc.estimate -= GCFINALIZECOST;
      return GCFINALIZECOST;
    }
    g->gc.state = GCSpause;  /* End of GC cycle. */
    g->gc.debt = 0;
    return 0;
  default:
    lj_assertG(0, "bad GC state");
    return 0;
  }
}

/* Perform a limited amount of incremental GC steps. */
int LJ_FASTCALL lj_gc_step(lua_State *L)
{
  global_State *g = G(L);
  GCSize lim;
  int32_t ostate = g->vmstate;
  setvmstate(g, GC);
  lim = (GCSTEPSIZE/100) * g->gc.stepmul;
  if (lim == 0)
    lim = LJ_MAX_MEM;
  if (g->gc.total > g->gc.threshold)
    g->gc.debt += g->gc.total - g->gc.threshold;
  do {
    lim -= (GCSize)gc_onestep(L);
    if (g->gc.state == GCSpause) {
      g->gc.threshold = (g->gc.estimate/100) * g->gc.pause;
      g->vmstate = ostate;
      return 1;  /* Finished a GC cycle. */
    }
  } while (sizeof(lim) == 8 ? ((int64_t)lim > 0) : ((int32_t)lim > 0));
  if (g->gc.debt < GCSTEPSIZE) {
    g->gc.threshold = g->gc.total + GCSTEPSIZE;
    g->vmstate = ostate;
    return -1;
  } else {
    g->gc.debt -= GCSTEPSIZE;
    g->gc.threshold = g->gc.total;
    g->vmstate = ostate;
    return 0;
  }
}

/* Ditto, but fix the stack top first. */
void LJ_FASTCALL lj_gc_step_fixtop(lua_State *L)
{
  if (curr_funcisL(L)) L->top = curr_topL(L);
  lj_gc_step(L);
}

#if LJ_HASJIT
/* Perform multiple GC steps. Called from JIT-compiled code. */
int LJ_FASTCALL lj_gc_step_jit(global_State *g, MSize steps)
{
  lua_State *L = gco2th(gcref(g->cur_L));
  L->base = tvref(G(L)->jit_base);
  L->top = curr_topL(L);
  while (steps-- > 0 && lj_gc_step(L) == 0)
    ;
  /* Return 1 to force a trace exit. */
  return (G(L)->gc.state == GCSatomic || G(L)->gc.state == GCSfinalize);
}
#endif

/* Perform a full GC cycle. */
void lj_gc_fullgc(lua_State *L)
{
  global_State *g = G(L);
  int32_t ostate = g->vmstate;
  setvmstate(g, GC);
  if (g->gc.state <= GCSatomic) {  /* Caught somewhere in the middle. */
    setmref(g->gc.sweep, &g->gc.root);  /* Sweep everything (preserving it). */
    setgcrefnull(g->gc.gray);  /* Reset lists from partial propagation. */
    setgcrefnull(g->gc.grayagain);
    setgcrefnull(g->gc.weak);
    g->gc.state = GCSsweepstring;  /* Fast forward to the sweep phase. */
    g->gc.sweepstr = 0;
  }
  while (g->gc.state == GCSsweepstring || g->gc.state == GCSsweep)
    gc_onestep(L);  /* Finish sweep. */
  lj_assertG(g->gc.state == GCSfinalize || g->gc.state == GCSpause,
             "bad GC state");
  /* Now perform a full GC. */
  g->gc.state = GCSpause;
  do { gc_onestep(L); } while (g->gc.state != GCSpause);
  g->gc.threshold = (g->gc.estimate/100) * g->gc.pause;
  g->vmstate = ostate;
}

/* -- Write barriers ------------------------------------------------------ */

/* Move the GC propagation frontier forward. */
void lj_gc_barrierf(global_State *g, GCobj *o, GCobj *v)
{
  lj_assertG(isblack(o) && iswhite(v) && !isdead(g, v) && !isdead(g, o),
             "bad object states for forward barrier");
  lj_assertG(g->gc.state != GCSfinalize && g->gc.state != GCSpause,
             "bad GC state");
  lj_assertG(o->gch.gct != ~LJ_TTAB, "barrier object is not a table");
  /* Preserve invariant during propagation. Otherwise it doesn't matter. */
  if (g->gc.state == GCSpropagate || g->gc.state == GCSatomic)
    gc_mark(g, v);  /* Move frontier forward. */
  else
    makewhite(g, o);  /* Make it white to avoid the following barrier. */
}

/* Specialized barrier for closed upvalue. Pass &uv->tv. */
void LJ_FASTCALL lj_gc_barrieruv(global_State *g, TValue *tv)
{
#define TV2MARKED(x) \
  (*((uint8_t *)(x) - offsetof(GCupval, tv) + offsetof(GCupval, marked)))
  if (g->gc.state == GCSpropagate || g->gc.state == GCSatomic)
    gc_mark(g, gcV(tv));
  else
    TV2MARKED(tv) = (TV2MARKED(tv) & (uint8_t)~LJ_GC_COLORS) | curwhite(g);
#undef TV2MARKED
}

/* Close upvalue. Also needs a write barrier. */
void lj_gc_closeuv(global_State *g, GCupval *uv)
{
  GCobj *o = obj2gco(uv);
  /* Copy stack slot to upvalue itself and point to the copy. */
  copyTV(mainthread(g), &uv->tv, uvval(uv));
  setmref(uv->v, &uv->tv);
  uv->closed = 1;
  setgcrefr(o->gch.nextgc, g->gc.root);
  setgcref(g->gc.root, o);
  if (isgray(o)) {  /* A closed upvalue is never gray, so fix this. */
    if (g->gc.state == GCSpropagate || g->gc.state == GCSatomic) {
      gray2black(o);  /* Make it black and preserve invariant. */
      if (tviswhite(&uv->tv))
        lj_gc_barrierf(g, o, gcV(&uv->tv));
    } else {
      makewhite(g, o);  /* Make it white, i.e. sweep the upvalue. */
      lj_assertG(g->gc.state != GCSfinalize && g->gc.state != GCSpause,
                 "bad GC state");
    }
  }
}

#if LJ_HASJIT
/* Mark a trace if it's saved during the propagation phase. */
void lj_gc_barriertrace(global_State *g, uint32_t traceno)
{
  if (g->gc.state == GCSpropagate || g->gc.state == GCSatomic)
    gc_marktrace(g, traceno);
}
#endif

/* -- Allocator ----------------------------------------------------------- */

/* Call pluggable memory allocator to allocate or resize a fragment. */
void *lj_mem_realloc(lua_State *L, void *p, GCSize osz, GCSize nsz)
{
  global_State *g = G(L);
  lj_assertG((osz == 0) == (p == NULL), "realloc API violation");

  setgcref(g->mem_L, obj2gco(L));
  p = g->allocf(g->allocd, p, osz, nsz);
  if (p == NULL && nsz > 0)
    lj_err_mem(L);
  lj_assertG((nsz == 0) == (p == NULL), "allocf API violation");
  lj_assertG(checkptrGC(p),
             "allocated memory address %p outside required range", p);
  g->gc.total = (g->gc.total - osz) + nsz;
  g->gc.allocated += nsz;
  g->gc.freed += osz;
  return p;
}

/* Allocate new GC object and link it to the root set. */
void * LJ_FASTCALL lj_mem_newgco(lua_State *L, GCSize size)
{
  global_State *g = G(L);
  GCobj *o;

  setgcref(g->mem_L, obj2gco(L));
  o = (GCobj *)g->allocf(g->allocd, NULL, 0, size);
  if (o == NULL)
    lj_err_mem(L);
  lj_assertG(checkptrGC(o),
             "allocated memory address %p outside required range", o);
  g->gc.total += size;
  g->gc.allocated += size;
  setgcrefr(o->gch.nextgc, g->gc.root);
  setgcref(g->gc.root, o);
  newwhite(g, o);
  return o;
}

/* Resize growable vector. */
void *lj_mem_grow(lua_State *L, void *p, MSize *szp, MSize lim, MSize esz)
{
  MSize sz = (*szp) << 1;
  if (sz < LJ_MIN_VECSZ)
    sz = LJ_MIN_VECSZ;
  if (sz > lim)
    sz = lim;
  p = lj_mem_realloc(L, p, (*szp)*esz, sz*esz);
  *szp = sz;
  return p;
}