• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

tarantool / luajit / 7262987147

19 Dec 2023 02:10PM UTC coverage: 88.225% (-0.4%) from 88.616%
7262987147

push

github

fckxorg
test: add tests for debugging extensions

This patch adds tests for LuaJIT debugging
extensions for lldb and gdb.

5336 of 5969 branches covered (0.0%)

Branch coverage included in aggregate %.

20475 of 23287 relevant lines covered (87.92%)

1285545.26 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

98.84
/src/lj_gc.c
1
/*
2
** Garbage collector.
3
** Copyright (C) 2005-2017 Mike Pall. See Copyright Notice in luajit.h
4
**
5
** Major portions taken verbatim or adapted from the Lua interpreter.
6
** Copyright (C) 1994-2008 Lua.org, PUC-Rio. See Copyright Notice in lua.h
7
*/
8

9
#define lj_gc_c
10
#define LUA_CORE
11

12
#include "lj_obj.h"
13
#include "lj_gc.h"
14
#include "lj_err.h"
15
#include "lj_buf.h"
16
#include "lj_str.h"
17
#include "lj_tab.h"
18
#include "lj_func.h"
19
#include "lj_udata.h"
20
#include "lj_meta.h"
21
#include "lj_state.h"
22
#include "lj_frame.h"
23
#if LJ_HASFFI
24
#include "lj_ctype.h"
25
#include "lj_cdata.h"
26
#endif
27
#include "lj_trace.h"
28
#include "lj_dispatch.h"
29
#include "lj_vm.h"
30

31
/* GC cost tuning constants. */
#define GCSTEPSIZE	1024u
#define GCSWEEPMAX	40
#define GCSWEEPCOST	10
#define GCFINALIZECOST	100

/* Macros to set GCobj colors and flags. */
#define white2gray(x)		((x)->gch.marked &= (uint8_t)~LJ_GC_WHITES)
#define gray2black(x)		((x)->gch.marked |= LJ_GC_BLACK)
#define isfinalized(u)		((u)->marked & LJ_GC_FINALIZED)

/* -- Mark phase ---------------------------------------------------------- */

/* Mark a TValue (if it holds a white collectable object). */
#define gc_marktv(g, tv) \
  { lj_assertG(!tvisgcv(tv) || (~itype(tv) == gcval(tv)->gch.gct), \
	       "TValue and GC type mismatch"); \
    if (tviswhite(tv)) gc_mark(g, gcV(tv)); }

/* Mark a GCobj (if it is still white). */
#define gc_markobj(g, o) \
  { if (iswhite(obj2gco(o))) gc_mark(g, obj2gco(o)); }

/* Mark a string object (strings are leaves: no gray phase needed). */
#define gc_mark_str(s)		((s)->marked &= (uint8_t)~LJ_GC_WHITES)
55

56
/* Mark a white GCobj. */
57
static void gc_mark(global_State *g, GCobj *o)
144,634,841✔
58
{
59
  int gct = o->gch.gct;
144,640,574✔
60
  lj_assertG(iswhite(o), "mark of non-white object");
144,640,574✔
61
  lj_assertG(!isdead(g, o), "mark of dead object");
144,640,574✔
62
  white2gray(o);
144,640,574✔
63
  if (LJ_UNLIKELY(gct == ~LJ_TUDATA)) {
144,640,574✔
64
    GCtab *mt = tabref(gco2ud(o)->metatable);
1,055,116✔
65
    gray2black(o);  /* Userdata are never gray. */
1,055,116✔
66
    if (mt) gc_markobj(g, mt);
1,055,116✔
67
    gc_markobj(g, tabref(gco2ud(o)->env));
1,055,116✔
68
  } else if (LJ_UNLIKELY(gct == ~LJ_TUPVAL)) {
143,585,458✔
69
    GCupval *uv = gco2uv(o);
1,677,172✔
70
    gc_marktv(g, uvval(uv));
1,677,172✔
71
    if (uv->closed)
1,677,172✔
72
      gray2black(o);  /* Closed upvalues are never gray. */
112,676✔
73
  } else if (gct != ~LJ_TSTR && gct != ~LJ_TCDATA) {
141,908,286✔
74
    lj_assertG(gct == ~LJ_TFUNC || gct == ~LJ_TTAB ||
7,427,071✔
75
               gct == ~LJ_TTHREAD || gct == ~LJ_TPROTO || gct == ~LJ_TTRACE,
76
               "bad GC type %d", gct);
77
    setgcrefr(o->gch.gclist, g->gc.gray);
7,427,071✔
78
    setgcref(g->gc.gray, o);
7,427,071✔
79
  }
80
}
144,634,841✔
81

82
/* Mark GC roots. */
83
static void gc_mark_gcroot(global_State *g)
12,827✔
84
{
85
  ptrdiff_t i;
12,827✔
86
  for (i = 0; i < GCROOT_MAX; i++)
500,253✔
87
    if (gcref(g->gcroot[i]) != NULL)
487,426✔
88
      gc_markobj(g, gcref(g->gcroot[i]));
331,641✔
89
}
12,827✔
90

91
/* Start a GC cycle and mark the root set. */
92
static void gc_mark_start(global_State *g)
6,426✔
93
{
94
  setgcrefnull(g->gc.gray);
6,426✔
95
  setgcrefnull(g->gc.grayagain);
6,426✔
96
  setgcrefnull(g->gc.weak);
6,426✔
97
  gc_markobj(g, mainthread(g));
6,426✔
98
  gc_markobj(g, tabref(mainthread(g)->env));
6,426✔
99
  gc_marktv(g, &g->registrytv);
6,426✔
100
  gc_mark_gcroot(g);
6,426✔
101
  g->gc.state = GCSpropagate;
6,426✔
102
}
6,426✔
103

104
/* Mark open upvalues. */
105
static void gc_mark_uv(global_State *g)
6,401✔
106
{
107
  GCupval *uv;
6,401✔
108
  for (uv = uvnext(&g->uvhead); uv != &g->uvhead; uv = uvnext(uv)) {
1,570,854✔
109
    lj_assertG(uvprev(uvnext(uv)) == uv && uvnext(uvprev(uv)) == uv,
1,564,453✔
110
               "broken upvalue chain");
111
    if (isgray(obj2gco(uv)))
1,564,453✔
112
      gc_marktv(g, uvval(uv));
1,564,453✔
113
  }
114
}
6,401✔
115

116
/* Mark userdata in mmudata list. */
117
static void gc_mark_mmudata(global_State *g)
6,401✔
118
{
119
  GCobj *root = gcref(g->gc.mmudata);
6,401✔
120
  GCobj *u = root;
6,401✔
121
  if (u) {
6,401✔
122
    do {
1,002,713✔
123
      u = gcnext(u);
1,002,713✔
124
      makewhite(g, u);  /* Could be from previous GC. */
1,002,713✔
125
      gc_mark(g, u);
1,002,713✔
126
    } while (u != root);
1,002,713✔
127
  }
128
}
6,401✔
129

130
/* Separate userdata objects to be finalized to mmudata list. */
131
size_t lj_gc_separateudata(global_State *g, int all)
6,865✔
132
{
133
  size_t m = 0;
6,865✔
134
  GCRef *p = &mainthread(g)->nextgc;
6,865✔
135
  GCobj *o;
6,865✔
136
  while ((o = gcref(*p)) != NULL) {
2,069,760✔
137
    if (!(iswhite(o) || all) || isfinalized(gco2ud(o))) {
2,062,895✔
138
      p = &o->gch.nextgc;  /* Nothing to do. */
1,056,773✔
139
    } else if (!lj_meta_fastg(g, tabref(gco2ud(o)->metatable), MM_gc)) {
1,006,122✔
140
      markfinalized(o);  /* Done, as there's no __gc metamethod. */
2,239✔
141
      p = &o->gch.nextgc;
2,239✔
142
    } else {  /* Otherwise move userdata to be finalized to mmudata list. */
143
      m += sizeudata(gco2ud(o));
1,003,883✔
144
      markfinalized(o);
1,003,883✔
145
      *p = o->gch.nextgc;
1,003,883✔
146
      if (gcref(g->gc.mmudata)) {  /* Link to end of mmudata list. */
1,003,883✔
147
        GCobj *root = gcref(g->gc.mmudata);
997,663✔
148
        setgcrefr(o->gch.nextgc, root->gch.nextgc);
997,663✔
149
        setgcref(root->gch.nextgc, o);
997,663✔
150
        setgcref(g->gc.mmudata, o);
997,663✔
151
      } else {  /* Create circular list. */
152
        setgcref(o->gch.nextgc, o);
6,220✔
153
        setgcref(g->gc.mmudata, o);
6,220✔
154
      }
155
    }
156
  }
157
  return m;
6,865✔
158
}
159

160
/* -- Propagation phase --------------------------------------------------- */
161

162
/* Traverse a table. */
163
static int gc_traverse_tab(global_State *g, GCtab *t)
2,851,418✔
164
{
165
  int weak = 0;
2,851,418✔
166
  cTValue *mode;
2,851,418✔
167
  GCtab *mt = tabref(t->metatable);
2,851,418✔
168
  if (mt)
2,851,418✔
169
    gc_markobj(g, mt);
30,356✔
170
  mode = lj_meta_fastg(g, mt, MM_mode);
2,851,418✔
171
  if (mode && tvisstr(mode)) {  /* Valid __mode field? */
18,333✔
172
    const char *modestr = strVdata(mode);
18,263✔
173
    int c;
18,263✔
174
    while ((c = *modestr++)) {
49,394✔
175
      if (c == 'k') weak |= LJ_GC_WEAKKEY;
31,131✔
176
      else if (c == 'v') weak |= LJ_GC_WEAKVAL;
12,892✔
177
    }
178
    if (weak) {  /* Weak tables are cleared in the atomic phase. */
18,263✔
179
#if LJ_HASFFI
180
      CTState *cts = ctype_ctsG(g);
18,263✔
181
      if (cts && cts->finalizer == t) {
18,263✔
182
        weak = (int)(~0u & ~LJ_GC_WEAKVAL);
183
      } else
184
#endif
185
      {
186
        t->marked = (uint8_t)((t->marked & ~LJ_GC_WEAK) | weak);
12,894✔
187
        setgcrefr(t->gclist, g->gc.weak);
12,894✔
188
        setgcref(g->gc.weak, obj2gco(t));
12,894✔
189
      }
190
    }
191
  }
192
  if (weak == LJ_GC_WEAK)  /* Nothing to mark if both keys/values are weak. */
24,917✔
193
    return 1;
194
  if (!(weak & LJ_GC_WEAKVAL)) {  /* Mark array part. */
2,838,550✔
195
    MSize i, asize = t->asize;
2,838,526✔
196
    for (i = 0; i < asize; i++)
8,666,386✔
197
      gc_marktv(g, arrayslot(t, i));
5,827,860✔
198
  }
199
  if (t->hmask > 0) {  /* Mark hash part. */
2,838,550✔
200
    Node *node = noderef(t->node);
1,744,388✔
201
    MSize i, hmask = t->hmask;
1,744,388✔
202
    for (i = 0; i <= hmask; i++) {
277,222,264✔
203
      Node *n = &node[i];
275,477,876✔
204
      if (!tvisnil(&n->val)) {  /* Mark non-empty slot. */
275,477,876✔
205
        lj_assertG(!tvisnil(&n->key), "mark of nil key in non-empty slot");
266,984,167✔
206
        if (!(weak & LJ_GC_WEAKKEY)) gc_marktv(g, &n->key);
266,984,167✔
207
        if (!(weak & LJ_GC_WEAKVAL)) gc_marktv(g, &n->val);
275,477,876✔
208
      }
209
    }
210
  }
211
  return weak;
212
}
213

214
/* Traverse a function. */
215
static void gc_traverse_func(global_State *g, GCfunc *fn)
3,404,179✔
216
{
217
  gc_markobj(g, tabref(fn->c.env));
3,404,179✔
218
  if (isluafunc(fn)) {
3,404,179✔
219
    uint32_t i;
2,110,180✔
220
    lj_assertG(fn->l.nupvalues <= funcproto(fn)->sizeuv,
2,110,180✔
221
               "function upvalues out of range");
222
    gc_markobj(g, funcproto(fn));
2,110,180✔
223
    for (i = 0; i < fn->l.nupvalues; i++)  /* Mark Lua function upvalues. */
3,950,076✔
224
      gc_markobj(g, &gcref(fn->l.uvptr[i])->uv);
1,839,896✔
225
  } else {
226
    uint32_t i;
227
    for (i = 0; i < fn->c.nupvalues; i++)  /* Mark C function upvalues. */
1,422,303✔
228
      gc_marktv(g, &fn->c.upvalue[i]);
128,304✔
229
  }
230
}
3,404,179✔
231

232
#if LJ_HASJIT
/* Mark a trace, identified by its trace number. */
static void gc_marktrace(global_State *g, TraceNo traceno)
{
  GCobj *o = obj2gco(traceref(G2J(g), traceno));
  lj_assertG(traceno != G2J(g)->cur.traceno, "active trace escaped");
  if (iswhite(o)) {
    white2gray(o);
    setgcrefr(o->gch.gclist, g->gc.gray);
    setgcref(g->gc.gray, o);
  }
}

/* Traverse a trace: GC constants, linked traces and the start prototype. */
static void gc_traverse_trace(global_State *g, GCtrace *T)
{
  IRRef ref;
  if (T->traceno == 0) return;
  for (ref = T->nk; ref < REF_TRUE; ref++) {
    IRIns *ir = &T->ir[ref];
    if (ir->o == IR_KGC)
      gc_markobj(g, ir_kgc(ir));
    if (irt_is64(ir->t) && ir->o != IR_KNULL)
      ref++;  /* Skip second slot of a 64 bit constant. */
  }
  if (T->link) gc_marktrace(g, T->link);
  if (T->nextroot) gc_marktrace(g, T->nextroot);
  if (T->nextside) gc_marktrace(g, T->nextside);
  gc_markobj(g, gcref(T->startpt));
}

/* The current trace is a GC root while not anchored in the prototype (yet). */
#define gc_traverse_curtrace(g)	gc_traverse_trace(g, &G2J(g)->cur)
#else
#define gc_traverse_curtrace(g)	UNUSED(g)
#endif
268

269
/* Traverse a prototype. */
270
static void gc_traverse_proto(global_State *g, GCproto *pt)
390,690✔
271
{
272
  ptrdiff_t i;
390,690✔
273
  gc_mark_str(proto_chunkname(pt));
390,690✔
274
  for (i = -(ptrdiff_t)pt->sizekgc; i < 0; i++)  /* Mark collectable consts. */
2,110,009✔
275
    gc_markobj(g, proto_kgc(pt, i));
1,719,319✔
276
#if LJ_HASJIT
277
  if (pt->trace) gc_marktrace(g, pt->trace);
390,690✔
278
#endif
279
}
390,690✔
280

281
/* Traverse the frame structure of a stack. */
282
static MSize gc_traverse_frames(global_State *g, lua_State *th)
283
{
284
  TValue *frame, *top = th->top-1, *bot = tvref(th->stack);
285
  /* Note: extra vararg frame not skipped, marks function twice (harmless). */
286
  for (frame = th->base-1; frame > bot+LJ_FR2; frame = frame_prev(frame)) {
287
    GCfunc *fn = frame_func(frame);
288
    TValue *ftop = frame;
289
    if (isluafunc(fn)) ftop += funcproto(fn)->framesize;
290
    if (ftop > top) top = ftop;
291
    if (!LJ_FR2) gc_markobj(g, fn);  /* Need to mark hidden function (or L). */
292
  }
293
  top++;  /* Correct bias of -1 (frame == base-1). */
294
  if (top > tvref(th->maxstack)) top = tvref(th->maxstack);
295
  return (MSize)(top - bot);  /* Return minimum needed stack size. */
296
}
297

298
/* Traverse a thread object. */
299
static void gc_traverse_thread(global_State *g, lua_State *th)
1,570,167✔
300
{
301
  TValue *o, *top = th->top;
1,570,167✔
302
  for (o = tvref(th->stack)+1+LJ_FR2; o < top; o++)
11,401,679✔
303
    gc_marktv(g, o);
9,831,512✔
304
  if (g->gc.state == GCSatomic) {
1,570,167✔
305
    top = tvref(th->stack) + th->stacksize;
784,618✔
306
    for (; o < top; o++)  /* Clear unmarked slots. */
31,565,680✔
307
      setnilV(o);
30,781,062✔
308
  }
309
  gc_markobj(g, tabref(th->env));
1,570,167✔
310
  lj_state_shrinkstack(th, gc_traverse_frames(g, th));
1,570,167✔
311
}
1,570,167✔
312

313
/* Propagate one gray object. Traverse it and turn it black. */
314
static size_t propagatemark(global_State *g)
8,280,240✔
315
{
316
  GCobj *o = gcref(g->gc.gray);
8,280,240✔
317
  int gct = o->gch.gct;
8,280,240✔
318
  lj_assertG(isgray(o), "propagation of non-gray object");
8,280,240✔
319
  gray2black(o);
8,280,240✔
320
  setgcrefr(g->gc.gray, o->gch.gclist);  /* Remove from gray list. */
8,280,240✔
321
  if (LJ_LIKELY(gct == ~LJ_TTAB)) {
8,280,240✔
322
    GCtab *t = gco2tab(o);
2,851,418✔
323
    if (gc_traverse_tab(g, t) > 0)
2,851,418✔
324
      black2gray(o);  /* Keep weak tables gray. */
12,894✔
325
    return sizeof(GCtab) + sizeof(TValue) * t->asize +
2,851,418✔
326
                           (t->hmask ? sizeof(Node) * (t->hmask + 1) : 0);
2,851,418✔
327
  } else if (LJ_LIKELY(gct == ~LJ_TFUNC)) {
5,428,822✔
328
    GCfunc *fn = gco2func(o);
3,404,179✔
329
    gc_traverse_func(g, fn);
3,404,179✔
330
    return isluafunc(fn) ? sizeLfunc((MSize)fn->l.nupvalues) :
3,404,179✔
331
                           sizeCfunc((MSize)fn->c.nupvalues);
1,293,999✔
332
  } else if (LJ_LIKELY(gct == ~LJ_TPROTO)) {
2,024,643✔
333
    GCproto *pt = gco2pt(o);
390,690✔
334
    gc_traverse_proto(g, pt);
390,690✔
335
    return pt->sizept;
390,690✔
336
  } else if (LJ_LIKELY(gct == ~LJ_TTHREAD)) {
1,633,953✔
337
    lua_State *th = gco2th(o);
1,570,167✔
338
    setgcrefr(th->gclist, g->gc.grayagain);
1,570,167✔
339
    setgcref(g->gc.grayagain, o);
1,570,167✔
340
    black2gray(o);  /* Threads are never black. */
1,570,167✔
341
    gc_traverse_thread(g, th);
1,570,167✔
342
    return sizeof(lua_State) + sizeof(TValue) * th->stacksize;
1,570,167✔
343
  } else {
344
#if LJ_HASJIT
345
    GCtrace *T = gco2trace(o);
63,786✔
346
    gc_traverse_trace(g, T);
63,786✔
347
    return ((sizeof(GCtrace)+7)&~7) + (T->nins-T->nk)*sizeof(IRIns) +
63,786✔
348
           T->nsnap*sizeof(SnapShot) + T->nsnapmap*sizeof(SnapEntry);
63,786✔
349
#else
350
    lj_assertG(0, "bad GC type %d", gct);
351
    return 0;
352
#endif
353
  }
354
}
355

356
/* Propagate all gray objects. */
357
static size_t gc_propagate_gray(global_State *g)
25,604✔
358
{
359
  size_t m = 0;
12,802✔
360
  while (gcref(g->gc.gray) != NULL)
2,968,295✔
361
    m += propagatemark(g);
2,942,691✔
362
  return m;
25,604✔
363
}
364

365
/* -- Sweep phase --------------------------------------------------------- */
366

367
/* Type of GC free functions. */
368
typedef void (LJ_FASTCALL *GCFreeFunc)(global_State *g, GCobj *o);
369

370
/* GC free functions for LJ_TSTR .. LJ_TUDATA. ORDER LJ_T */
371
static const GCFreeFunc gc_freefunc[] = {
372
  (GCFreeFunc)lj_str_free,
373
  (GCFreeFunc)lj_func_freeuv,
374
  (GCFreeFunc)lj_state_free,
375
  (GCFreeFunc)lj_func_freeproto,
376
  (GCFreeFunc)lj_func_free,
377
#if LJ_HASJIT
378
  (GCFreeFunc)lj_trace_free,
379
#else
380
  (GCFreeFunc)0,
381
#endif
382
#if LJ_HASFFI
383
  (GCFreeFunc)lj_cdata_free,
384
#else
385
  (GCFreeFunc)0,
386
#endif
387
  (GCFreeFunc)lj_tab_free,
388
  (GCFreeFunc)lj_udata_free
389
};
390

391
/* Full sweep of a GC list. */
392
#define gc_fullsweep(g, p)        gc_sweep(g, (p), ~(uint32_t)0)
393

394
/* Partial sweep of a GC list. */
395
static GCRef *gc_sweep(global_State *g, GCRef *p, uint32_t lim)
7,014,248✔
396
{
397
  /* Mask with other white and LJ_GC_FIXED. Or LJ_GC_SFIXED on shutdown. */
398
  int ow = otherwhite(g);
7,014,248✔
399
  GCobj *o;
7,014,248✔
400
  while ((o = gcref(*p)) != NULL && lim-- > 0) {
225,587,648✔
401
    if (o->gch.gct == ~LJ_TTHREAD)  /* Need to sweep open upvalues, too. */
218,573,400✔
402
      gc_fullsweep(g, &gco2th(o)->openupval);
1,477,997✔
403
    if (((o->gch.marked ^ LJ_GC_WHITES) & ow)) {  /* Black or current white? */
218,573,400✔
404
      lj_assertG(!isdead(g, o) || (o->gch.marked & LJ_GC_FIXED),
145,940,532✔
405
                 "sweep of undead object");
406
      makewhite(g, o);  /* Value is alive, change to the current white. */
145,940,532✔
407
      p = &o->gch.nextgc;
145,940,532✔
408
    } else {  /* Otherwise value is dead, free it. */
409
      lj_assertG(isdead(g, o) || ow == LJ_GC_SFIXED,
72,632,868✔
410
                 "sweep of unlive object");
411
      setgcrefr(*p, o->gch.nextgc);
72,632,868✔
412
      if (o == gcref(g->gc.root))
72,632,868✔
413
        setgcrefr(g->gc.root, o->gch.nextgc);  /* Adjust list anchor. */
×
414
      gc_freefunc[o->gch.gct - ~LJ_TSTR](g, o);
72,632,868✔
415
    }
416
  }
417
  return p;
7,014,248✔
418
}
419

420
/* Full sweep of a string chain. */
421
static GCRef *gc_sweep_str_chain(global_State *g, GCRef *p)
8,606,626✔
422
{
423
  /* Mask with other white and LJ_GC_FIXED. Or LJ_GC_SFIXED on shutdown. */
424
  int ow = otherwhite(g);
8,606,626✔
425
  GCobj *o;
8,606,626✔
426
  while ((o = gcref(*p)) != NULL) {
13,561,639✔
427
    if (((o->gch.marked ^ LJ_GC_WHITES) & ow)) {  /* Black or current white? */
4,955,013✔
428
      lj_assertG(!isdead(g, o) || (o->gch.marked & LJ_GC_FIXED),
4,489,819✔
429
                 "sweep of undead string");
430
      makewhite(g, o);  /* Value is alive, change to the current white. */
4,489,819✔
431
#if LUAJIT_SMART_STRINGS
432
      if (strsmart(&o->str)) {
4,489,819✔
433
        /* must match lj_str_new */
434
        bloomset(g->strbloom.next[0], o->str.hash >> (sizeof(o->str.hash)*8-6));
5,343✔
435
        bloomset(g->strbloom.next[1], o->str.strflags);
5,343✔
436
      }
437
#endif
438
      p = &o->gch.nextgc;
4,489,819✔
439
    } else {  /* Otherwise value is dead, free it. */
440
      lj_assertG(isdead(g, o) || ow == LJ_GC_SFIXED,
465,194✔
441
                 "sweep of unlive string");
442
      setgcrefr(*p, o->gch.nextgc);
465,194✔
443
      lj_str_free(g, &o->str);
465,194✔
444
    }
445
  }
446
  return p;
8,606,626✔
447
}
448

449
/* Check whether we can clear a key or a value slot from a table. */
450
static int gc_mayclear(cTValue *o, int val)
451
{
452
  if (tvisgcv(o)) {  /* Only collectable objects can be weak references. */
453
    if (tvisstr(o)) {  /* But strings cannot be used as weak references. */
454
      gc_mark_str(strV(o));  /* And need to be marked. */
455
      return 0;
456
    }
457
    if (iswhite(gcV(o)))
458
      return 1;  /* Object is about to be collected. */
459
    if (tvisudata(o) && val && isfinalized(udataV(o)))
460
      return 1;  /* Finalized userdata is dropped only from values. */
461
  }
462
  return 0;  /* Cannot clear. */
463
}
464

465
/* Clear collected entries from weak tables. */
466
static void gc_clearweak(global_State *g, GCobj *o)
467
{
468
  UNUSED(g);
469
  while (o) {
470
    GCtab *t = gco2tab(o);
471
    lj_assertG((t->marked & LJ_GC_WEAK), "clear of non-weak table");
472
    if ((t->marked & LJ_GC_WEAKVAL)) {
473
      MSize i, asize = t->asize;
474
      for (i = 0; i < asize; i++) {
475
        /* Clear array slot when value is about to be collected. */
476
        TValue *tv = arrayslot(t, i);
477
        if (gc_mayclear(tv, 1))
478
          setnilV(tv);
479
      }
480
    }
481
    if (t->hmask > 0) {
482
      Node *node = noderef(t->node);
483
      MSize i, hmask = t->hmask;
484
      for (i = 0; i <= hmask; i++) {
485
        Node *n = &node[i];
486
        /* Clear hash slot when key or value is about to be collected. */
487
        if (!tvisnil(&n->val) && (gc_mayclear(&n->key, 0) ||
488
                                  gc_mayclear(&n->val, 1)))
489
          setnilV(&n->val);
490
      }
491
    }
492
    o = gcref(t->gclist);
493
  }
494
}
495

496
/* Call a userdata or cdata finalizer. */
497
static void gc_call_finalizer(global_State *g, lua_State *L,
498
                              cTValue *mo, GCobj *o)
499
{
500
  /* Save and restore lots of state around the __gc callback. */
501
  uint8_t oldh = hook_save(g);
502
  GCSize oldt = g->gc.threshold;
503
  int errcode;
504
  TValue *top;
505
  lj_trace_abort(g);
506
  hook_entergc(g);  /* Disable hooks and new traces during __gc. */
507
  if (LJ_HASPROFILE && (oldh & HOOK_PROFILE)) lj_dispatch_update(g);
508
  g->gc.threshold = LJ_MAX_MEM;  /* Prevent GC steps. */
509
  top = L->top;
510
  copyTV(L, top++, mo);
511
  if (LJ_FR2) setnilV(top++);
512
  setgcV(L, top, o, ~o->gch.gct);
513
  L->top = top+1;
514
  errcode = lj_vm_pcall(L, top, 1+0, -1);  /* Stack: |mo|o| -> | */
515
  hook_restore(g, oldh);
516
  if (LJ_HASPROFILE && (oldh & HOOK_PROFILE)) lj_dispatch_update(g);
517
  g->gc.threshold = oldt;  /* Restore GC threshold. */
518
  if (errcode)
519
    lj_err_throw(L, errcode);  /* Propagate errors. */
520
}
521

522
/* Finalize one userdata or cdata object from the mmudata list. */
523
static void gc_finalize(lua_State *L)
1,003,890✔
524
{
525
  global_State *g = G(L);
1,003,890✔
526
  GCobj *o = gcnext(gcref(g->gc.mmudata));
1,003,890✔
527
  cTValue *mo;
1,003,890✔
528
  lj_assertG(tvref(g->jit_base) == NULL, "finalizer called on trace");
1,003,890✔
529
  /* Unchain from list of userdata to be finalized. */
530
  if (o == gcref(g->gc.mmudata))
1,003,890✔
531
    setgcrefnull(g->gc.mmudata);
6,226✔
532
  else
533
    setgcrefr(gcref(g->gc.mmudata)->gch.nextgc, o->gch.nextgc);
997,664✔
534
#if LJ_HASFFI
535
  if (o->gch.gct == ~LJ_TCDATA) {
1,003,890✔
536
    TValue tmp, *tv;
7✔
537
    /* Add cdata back to the GC list and make it white. */
538
    setgcrefr(o->gch.nextgc, g->gc.root);
7✔
539
    setgcref(g->gc.root, o);
7✔
540
    makewhite(g, o);
7✔
541
    o->gch.marked &= (uint8_t)~LJ_GC_CDATA_FIN;
7✔
542
    /* Resolve finalizer. */
543
    setcdataV(L, &tmp, gco2cd(o));
7✔
544
    tv = lj_tab_set(L, ctype_ctsG(g)->finalizer, &tmp);
7✔
545
    if (!tvisnil(tv)) {
7✔
546
      g->gc.nocdatafin = 0;
7✔
547
      copyTV(L, &tmp, tv);
7✔
548
      setnilV(tv);  /* Clear entry in finalizer table. */
7✔
549
      gc_call_finalizer(g, L, &tmp, o);
7✔
550
    }
551
    return;
7✔
552
  }
553
#endif
554
  /* Add userdata back to the main userdata list and make it white. */
555
  setgcrefr(o->gch.nextgc, mainthread(g)->nextgc);
1,003,883✔
556
  setgcref(mainthread(g)->nextgc, o);
1,003,883✔
557
  makewhite(g, o);
1,003,883✔
558
  /* Resolve the __gc metamethod. */
559
  mo = lj_meta_fastg(g, tabref(gco2ud(o)->metatable), MM_gc);
1,003,883✔
560
  if (mo)
1,003,883✔
561
    gc_call_finalizer(g, L, mo, o);
1,003,883✔
562
}
563

564
/* Finalize all userdata objects from mmudata list. */
565
void lj_gc_finalize_udata(lua_State *L)
248✔
566
{
567
  while (gcref(G(L)->gc.mmudata) != NULL)
1,410✔
568
    gc_finalize(L);
1,173✔
569
}
237✔
570

571
#if LJ_HASFFI
/* Finalize all cdata objects from the finalizer table. */
void lj_gc_finalize_cdata(lua_State *L)
{
  global_State *g = G(L);
  CTState *cts = ctype_ctsG(g);
  if (cts) {
    GCtab *t = cts->finalizer;
    Node *node = noderef(t->node);
    ptrdiff_t i;
    setgcrefnull(t->metatable);  /* Mark finalizer table as disabled. */
    for (i = (ptrdiff_t)t->hmask; i >= 0; i--)
      if (!tvisnil(&node[i].val) && tviscdata(&node[i].key)) {
	GCobj *o = gcV(&node[i].key);
	TValue tmp;
	makewhite(g, o);
	o->gch.marked &= (uint8_t)~LJ_GC_CDATA_FIN;
	copyTV(L, &tmp, &node[i].val);
	setnilV(&node[i].val);
	gc_call_finalizer(g, L, &tmp, o);
      }
  }
}
#endif
595

596
/* Free all remaining GC objects. */
597
void lj_gc_freeall(global_State *g)
228✔
598
{
599
  MSize i, strmask;
228✔
600
  /* Free everything, except super-fixed objects (the main thread). */
601
  g->gc.currentwhite = LJ_GC_WHITES | LJ_GC_SFIXED;
228✔
602
  gc_fullsweep(g, &g->gc.root);
228✔
603
  strmask = g->strmask;
228✔
604
  for (i = 0; i <= strmask; i++)  /* Free all string hash chains. */
161,252✔
605
    gc_fullsweep(g, &g->strhash[i]);
161,024✔
606
}
228✔
607

608
/* -- Collector ----------------------------------------------------------- */
609

610
/* Atomic part of the GC cycle, transitioning from mark to sweep phase. */
611
static void atomic(global_State *g, lua_State *L)
6,401✔
612
{
613
  size_t udsize;
6,401✔
614

615
  gc_mark_uv(g);  /* Need to remark open upvalues (the thread may be dead). */
6,401✔
616
  gc_propagate_gray(g);  /* Propagate any left-overs. */
6,401✔
617

618
  setgcrefr(g->gc.gray, g->gc.weak);  /* Empty the list of weak tables. */
6,401✔
619
  setgcrefnull(g->gc.weak);
6,401✔
620
  lj_assertG(!iswhite(obj2gco(mainthread(g))), "main thread turned white");
6,401✔
621
  gc_markobj(g, L);  /* Mark running thread. */
6,401✔
622
  gc_traverse_curtrace(g);  /* Traverse current trace. */
6,401✔
623
  gc_mark_gcroot(g);  /* Mark GC roots (again). */
6,401✔
624
  gc_propagate_gray(g);  /* Propagate all of the above. */
6,401✔
625

626
  setgcrefr(g->gc.gray, g->gc.grayagain);  /* Empty the 2nd chance list. */
6,401✔
627
  setgcrefnull(g->gc.grayagain);
6,401✔
628
  gc_propagate_gray(g);  /* Propagate it. */
6,401✔
629

630
  udsize = lj_gc_separateudata(g, 0);  /* Separate userdata to be finalized. */
6,401✔
631
  gc_mark_mmudata(g);  /* Mark them. */
6,401✔
632
  udsize += gc_propagate_gray(g);  /* And propagate the marks. */
6,401✔
633

634
  /* All marking done, clear weak tables. */
635
  gc_clearweak(g, gcref(g->gc.weak));
6,401✔
636

637
  lj_buf_shrink(L, &g->tmpbuf);  /* Shrink temp buffer. */
6,401✔
638

639
  /* Prepare for sweep phase. */
640
  g->gc.currentwhite = (uint8_t)otherwhite(g);  /* Flip current white. */
6,401✔
641
  g->strempty.marked = g->gc.currentwhite;
6,401✔
642
  setmref(g->gc.sweep, &g->gc.root);
6,401✔
643
  g->gc.estimate = g->gc.total - (GCSize)udsize;  /* Initial estimate. */
6,401✔
644
}
6,401✔
645

646
/* GC state machine. Returns a cost estimate for each step performed. */
647
static size_t gc_onestep(lua_State *L)
20,347,188✔
648
{
649
  global_State *g = G(L);
20,347,188✔
650
  g->gc.state_count[g->gc.state]++;
20,347,188✔
651
  switch (g->gc.state) {
20,347,188✔
652
  case GCSpause:
6,426✔
653
    gc_mark_start(g);  /* Start a new GC cycle by marking all GC roots. */
6,426✔
654
    return 0;
6,426✔
655
  case GCSpropagate:
5,343,950✔
656
    if (gcref(g->gc.gray) != NULL)
5,343,950✔
657
      return propagatemark(g);  /* Propagate one gray object. */
5,337,549✔
658
    g->gc.state = GCSatomic;  /* End of mark phase. */
6,401✔
659
    return 0;
6,401✔
660
  case GCSatomic:
6,466✔
661
    if (tvref(g->jit_base))  /* Don't run atomic phase on trace. */
6,466✔
662
      return LJ_MAX_MEM;
663
    atomic(g, L);
6,401✔
664
    g->gc.state = GCSsweepstring;  /* Start of sweep phase. */
6,401✔
665
    g->gc.sweepstr = 0;
6,401✔
666
#if LUAJIT_SMART_STRINGS
667
    g->strbloom.next[0] = 0;
6,401✔
668
    g->strbloom.next[1] = 0;
6,401✔
669
#endif
670
    return 0;
6,401✔
671
  case GCSsweepstring: {
8,606,626✔
672
    GCSize old = g->gc.total;
8,606,626✔
673
    gc_sweep_str_chain(g, &g->strhash[g->gc.sweepstr++]);  /* Sweep one chain. */
8,606,626✔
674
    if (g->gc.sweepstr > g->strmask) {
8,606,626✔
675
      g->gc.state = GCSsweep;  /* All string hash chains sweeped. */
7,278✔
676
#if LUAJIT_SMART_STRINGS
677
      g->strbloom.cur[0] = g->strbloom.next[0];
7,278✔
678
      g->strbloom.cur[1] = g->strbloom.next[1];
7,278✔
679
#endif
680
    }
681
    lj_assertG(old >= g->gc.total, "sweep increased memory");
8,606,626✔
682
    g->gc.estimate -= old - g->gc.total;
8,606,626✔
683
    return GCSWEEPCOST;
8,606,626✔
684
    }
685
  case GCSsweep: {
5,374,999✔
686
    GCSize old = g->gc.total;
5,374,999✔
687
    setmref(g->gc.sweep, gc_sweep(g, mref(g->gc.sweep, GCRef), GCSWEEPMAX));
5,374,999✔
688
    lj_assertG(old >= g->gc.total, "sweep increased memory");
5,374,999✔
689
    g->gc.estimate -= old - g->gc.total;
5,374,999✔
690
    if (gcref(*mref(g->gc.sweep, GCRef)) == NULL) {
5,374,999✔
691
      if (g->strnum <= (g->strmask >> 2) && g->strmask > LJ_MIN_STRTAB*2-1)
7,274✔
692
        lj_str_resize(L, g->strmask >> 1);  /* Shrink string table. */
94✔
693
      if (gcref(g->gc.mmudata)) {  /* Need any finalizations? */
7,274✔
694
        g->gc.state = GCSfinalize;
5,997✔
695
#if LJ_HASFFI
696
        g->gc.nocdatafin = 1;
5,997✔
697
#endif
698
      } else {  /* Otherwise skip this phase to help the JIT. */
699
        g->gc.state = GCSpause;  /* End of GC cycle. */
1,277✔
700
        g->gc.debt = 0;
1,277✔
701
      }
702
    }
703
    return GCSWEEPMAX*GCSWEEPCOST;
704
    }
705
  case GCSfinalize:
1,008,721✔
706
    if (gcref(g->gc.mmudata) != NULL) {
1,008,721✔
707
      if (tvref(g->jit_base))  /* Don't call finalizers on trace. */
1,002,728✔
708
        return LJ_MAX_MEM;
709
      gc_finalize(L);  /* Finalize one userdata object. */
1,002,717✔
710
      if (g->gc.estimate > GCFINALIZECOST)
1,002,715✔
711
        g->gc.estimate -= GCFINALIZECOST;
1,002,715✔
712
      return GCFINALIZECOST;
1,002,715✔
713
    }
714
#if LJ_HASFFI
715
    if (!g->gc.nocdatafin) lj_tab_rehash(L, ctype_ctsG(g)->finalizer);
5,993✔
716
#endif
717
    g->gc.state = GCSpause;  /* End of GC cycle. */
5,993✔
718
    g->gc.debt = 0;
5,993✔
719
    return 0;
5,993✔
720
  default:
721
    lj_assertG(0, "bad GC state");
722
    return 0;
723
  }
724
}
725

726
/* Perform a limited amount of incremental GC steps. */
727
int LJ_FASTCALL lj_gc_step(lua_State *L)
621,943✔
728
{
729
  global_State *g = G(L);
621,943✔
730
  GCSize lim;
621,943✔
731
  int32_t ostate = g->vmstate;
621,943✔
732
  setvmstate(g, GC);
621,943✔
733
  lim = (GCSTEPSIZE/100) * g->gc.stepmul;
621,943✔
734
  if (lim == 0)
621,943✔
735
    lim = LJ_MAX_MEM;
×
736
  if (g->gc.total > g->gc.threshold)
621,943✔
737
    g->gc.debt += g->gc.total - g->gc.threshold;
607,366✔
738
  do {
9,464,541✔
739
    lim -= (GCSize)gc_onestep(L);
9,464,541✔
740
    if (g->gc.state == GCSpause) {
9,464,541✔
741
      g->gc.threshold = (g->gc.estimate/100) * g->gc.pause;
5,499✔
742
      g->vmstate = ostate;
5,499✔
743
      return 1;  /* Finished a GC cycle. */
5,499✔
744
    }
745
  } while (sizeof(lim) == 8 ? ((int64_t)lim > 0) : ((int32_t)lim > 0));
9,459,042✔
746
  if (g->gc.debt < GCSTEPSIZE) {
616,444✔
747
    g->gc.threshold = g->gc.total + GCSTEPSIZE;
207,261✔
748
    g->vmstate = ostate;
207,261✔
749
    return -1;
207,261✔
750
  } else {
751
    g->gc.debt -= GCSTEPSIZE;
409,183✔
752
    g->gc.threshold = g->gc.total;
409,183✔
753
    g->vmstate = ostate;
409,183✔
754
    return 0;
409,183✔
755
  }
756
}
757

758
/* Ditto, but fix the stack top first. */
759
void LJ_FASTCALL lj_gc_step_fixtop(lua_State *L)
67,585✔
760
{
761
  if (curr_funcisL(L)) L->top = curr_topL(L);
67,585✔
762
  lj_gc_step(L);
67,585✔
763
}
67,585✔
764

765
#if LJ_HASJIT
/* Perform multiple GC steps. Called from JIT-compiled code. */
int LJ_FASTCALL lj_gc_step_jit(global_State *g, MSize steps)
{
  lua_State *L = gco2th(gcref(g->cur_L));
  L->base = tvref(G(L)->jit_base);
  L->top = curr_topL(L);
  while (steps-- > 0 && lj_gc_step(L) == 0)
    ;
  /* Return 1 to force a trace exit. */
  return (G(L)->gc.state == GCSatomic || G(L)->gc.state == GCSfinalize);
}
#endif
778

779
/* Perform a full GC cycle.
**
** If a cycle is already in progress, first abandons any partial mark state
** and finishes its sweep (preserving all objects), then runs one complete
** mark-and-sweep cycle from GCSpause to GCSpause.
*/
void lj_gc_fullgc(lua_State *L)
{
  global_State *g = G(L);
  int32_t ostate = g->vmstate;  /* Save VM state to restore it on return. */
  setvmstate(g, GC);
  if (g->gc.state <= GCSatomic) {  /* Caught somewhere in the middle. */
    setmref(g->gc.sweep, &g->gc.root);  /* Sweep everything (preserving it). */
    setgcrefnull(g->gc.gray);  /* Reset lists from partial propagation. */
    setgcrefnull(g->gc.grayagain);
    setgcrefnull(g->gc.weak);
    g->gc.state = GCSsweepstring;  /* Fast forward to the sweep phase. */
    g->gc.sweepstr = 0;
  }
  while (g->gc.state == GCSsweepstring || g->gc.state == GCSsweep)
    gc_onestep(L);  /* Finish sweep. */
  lj_assertG(g->gc.state == GCSfinalize || g->gc.state == GCSpause,
             "bad GC state");
  /* Now perform a full GC. */
  g->gc.state = GCSpause;
  do { gc_onestep(L); } while (g->gc.state != GCSpause);
  /* Same threshold computation as at the end of lj_gc_step(). */
  g->gc.threshold = (g->gc.estimate/100) * g->gc.pause;
  g->vmstate = ostate;
}
803

804
/* -- Write barriers ------------------------------------------------------ */
805

806
/* Move the GC propagation frontier forward.
**
** Called when a black object o acquires a reference to a white object v,
** which would violate the tri-color invariant. Tables are excluded here
** (see assertion); they use a different barrier mechanism.
*/
void lj_gc_barrierf(global_State *g, GCobj *o, GCobj *v)
{
  lj_assertG(isblack(o) && iswhite(v) && !isdead(g, v) && !isdead(g, o),
             "bad object states for forward barrier");
  lj_assertG(g->gc.state != GCSfinalize && g->gc.state != GCSpause,
             "bad GC state");
  lj_assertG(o->gch.gct != ~LJ_TTAB, "barrier object is not a table");
  /* Preserve invariant during propagation. Otherwise it doesn't matter. */
  if (g->gc.state == GCSpropagate || g->gc.state == GCSatomic)
    gc_mark(g, v);  /* Move frontier forward. */
  else
    makewhite(g, o);  /* Make it white to avoid the following barrier. */
}
820

821
/* Specialized barrier for closed upvalue. Pass &uv->tv.
**
** tv points at the tv field embedded in a GCupval; the macro below recovers
** the upvalue's marked byte by offset arithmetic from that address.
*/
void LJ_FASTCALL lj_gc_barrieruv(global_State *g, TValue *tv)
{
#define TV2MARKED(x) \
  (*((uint8_t *)(x) - offsetof(GCupval, tv) + offsetof(GCupval, marked)))
  if (g->gc.state == GCSpropagate || g->gc.state == GCSatomic)
    gc_mark(g, gcV(tv));  /* Keep the invariant: mark the stored value. */
  else
    /* Otherwise just recolor the upvalue itself to the current white. */
    TV2MARKED(tv) = (TV2MARKED(tv) & (uint8_t)~LJ_GC_COLORS) | curwhite(g);
#undef TV2MARKED
}
832

833
/* Close upvalue. Also needs a write barrier.
**
** Copies the referenced stack slot into the upvalue itself, repoints the
** upvalue at its own copy, links it onto the main GC root list, and fixes
** up its color so a closed upvalue is never gray.
*/
void lj_gc_closeuv(global_State *g, GCupval *uv)
{
  GCobj *o = obj2gco(uv);
  /* Copy stack slot to upvalue itself and point to the copy. */
  copyTV(mainthread(g), &uv->tv, uvval(uv));
  setmref(uv->v, &uv->tv);
  uv->closed = 1;
  /* Prepend the now-closed upvalue to the regular GC root list. */
  setgcrefr(o->gch.nextgc, g->gc.root);
  setgcref(g->gc.root, o);
  if (isgray(o)) {  /* A closed upvalue is never gray, so fix this. */
    if (g->gc.state == GCSpropagate || g->gc.state == GCSatomic) {
      gray2black(o);  /* Make it black and preserve invariant. */
      if (tviswhite(&uv->tv))
        lj_gc_barrierf(g, o, gcV(&uv->tv));
    } else {
      makewhite(g, o);  /* Make it white, i.e. sweep the upvalue. */
      lj_assertG(g->gc.state != GCSfinalize && g->gc.state != GCSpause,
                 "bad GC state");
    }
  }
}
855

856
#if LJ_HASJIT
/* Mark a trace if it's saved during the propagation phase. */
void lj_gc_barriertrace(global_State *g, uint32_t traceno)
{
  /* Outside of propagation/atomic there is no invariant to preserve. */
  if (g->gc.state != GCSpropagate && g->gc.state != GCSatomic)
    return;
  gc_marktrace(g, traceno);
}
#endif
864

865
/* -- Allocator ----------------------------------------------------------- */
866

867
/* Call pluggable memory allocator to allocate or resize a fragment.
**
** p/osz    existing fragment and its size (NULL/0 for a fresh allocation).
** nsz      requested new size (0 frees the fragment).
**
** Throws a memory error on allocation failure; otherwise returns the new
** fragment and updates the GC accounting (total, allocated, freed).
*/
void *lj_mem_realloc(lua_State *L, void *p, GCSize osz, GCSize nsz)
{
  global_State *g = G(L);
  lj_assertG((osz == 0) == (p == NULL), "realloc API violation");

  /* Remember which coroutine performed the allocation (before allocf,
  ** so it is set even if the allocator ends up raising an error).
  */
  setgcref(g->mem_L, obj2gco(L));
  p = g->allocf(g->allocd, p, osz, nsz);
  if (p == NULL && nsz > 0)
    lj_err_mem(L);
  lj_assertG((nsz == 0) == (p == NULL), "allocf API violation");
  lj_assertG(checkptrGC(p),
             "allocated memory address %p outside required range", p);
  /* total is the current footprint; allocated/freed are cumulative. */
  g->gc.total = (g->gc.total - osz) + nsz;
  g->gc.allocated += nsz;
  g->gc.freed += osz;
  return p;
}
885

886
/* Allocate new GC object and link it to the root set.
**
** Allocates size bytes via the pluggable allocator, accounts for them,
** prepends the object to the GC root list and colors it with the current
** white. Throws a memory error on allocation failure.
*/
void * LJ_FASTCALL lj_mem_newgco(lua_State *L, GCSize size)
{
  global_State *g = G(L);
  GCobj *o;

  /* Remember which coroutine performed the allocation. */
  setgcref(g->mem_L, obj2gco(L));
  o = (GCobj *)g->allocf(g->allocd, NULL, 0, size);
  if (o == NULL)
    lj_err_mem(L);
  lj_assertG(checkptrGC(o),
             "allocated memory address %p outside required range", o);
  g->gc.total += size;
  g->gc.allocated += size;
  /* Link the new object at the head of the GC root list. */
  setgcrefr(o->gch.nextgc, g->gc.root);
  setgcref(g->gc.root, o);
  newwhite(g, o);  /* New objects start out white (not yet marked). */
  return o;
}
905

906
/* Resize growable vector. */
907
void *lj_mem_grow(lua_State *L, void *p, MSize *szp, MSize lim, MSize esz)
166,355✔
908
{
909
  MSize sz = (*szp) << 1;
166,355✔
910
  if (sz < LJ_MIN_VECSZ)
166,355✔
911
    sz = LJ_MIN_VECSZ;
912
  if (sz > lim)
166,355✔
913
    sz = lim;
914
  p = lj_mem_realloc(L, p, (*szp)*esz, sz*esz);
166,355✔
915
  *szp = sz;
166,355✔
916
  return p;
166,355✔
917
}
918

STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2025 Coveralls, Inc