luttje / glua-api-snippets · build 17342129938 (push, github)
30 Aug 2025 09:27AM UTC · coverage: 80.397% (remained the same)
Commit by luttje: Update to node 22 assert -> with

343 of 427 branches covered (80.33%)

Branch coverage included in aggregate %.

1 of 5 new or added lines in 4 files covered (20.0%)

1806 of 2246 relevant lines covered (80.41%)

439.06 hits per line
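
The commit message refers to the JSON module import syntax: the older import-assertion keyword (`assert { type: "json" }`) is deprecated on newer Node.js/V8 releases, and the commit moves to the import-attributes keyword `with` for Node 22. The `package.json` import in the file below already uses the new form. A minimal sketch of the change, assuming only the module path and `.version` access shown in the listing below:

// Older import-assertion syntax, deprecated on newer Node.js/V8 releases:
// import packageJson from '../package.json' assert { type: "json" };

// Import-attributes syntax this commit switches to (used on Node 22):
import packageJson from '../package.json' with { type: "json" };

console.log(packageJson.version); // the scraper feeds this into its --version output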

Source File: /src/cli-scraper.ts (0.0% of lines covered)

import { WikiPageMarkupScraper, isLibrary, isClass } from './scrapers/wiki-page-markup-scraper.js';
import { WikiPageListScraper } from './scrapers/wiki-page-list-scraper.js';
import packageJson from '../package.json' with { type: "json" };
import { GluaApiWriter } from './api-writer/glua-api-writer.js';
import { scrapeAndCollect } from './scrapers/collector.js';
import { writeMetadata } from './utils/metadata.js';
import { RequestInitWithRetry } from 'fetch-retry';
import { Command } from 'commander';
import path from 'path';
import fs from 'fs';

async function startScrape() {
  const program = new Command();

  program
    .version(packageJson.version)
    .description('Scrapes the Garry\'s Mod wiki for API information')
    .option('-o, --output <path>', 'The path to the directory where the output json and lua files should be saved', './output')
    .option('-u, --url <url>', 'The pagelist URL of the Garry\'s Mod wiki that holds all pages to scrape', 'https://wiki.facepunch.com/gmod/')
    .option('-c, --customOverrides [path]', 'The path to a directory containing custom overrides for the API')
    .option('-w, --wipe', 'Clean the output directory before scraping', false)
    .parse(process.argv);

  const options = program.opts();

  if (!options.url) {
    console.error('No URL provided');
    process.exit(1);
  }

  const baseDirectory = options.output.replace(/\/$/, '');
  const customDirectory = options.customOverrides?.replace(/\/$/, '') ?? null;
  const baseUrl = options.url.replace(/\/$/, '');
  const pageListScraper = new WikiPageListScraper(`${baseUrl}/~pagelist?format=json`);
  const writer = new GluaApiWriter(baseDirectory);

  const retryOptions: RequestInitWithRetry = {
    retries: 5,
    retryDelay: function (attempt, error, response) {
      return Math.pow(2, attempt) * 500; // 500, 1000, 2000, 4000, 8000
    }
  }

  pageListScraper.setRetryOptions(retryOptions);

  writeMetadata(baseUrl, baseDirectory);

  if (options.wipe && fs.existsSync(baseDirectory))
    fs.rmSync(baseDirectory, { recursive: true });

  if (!fs.existsSync(baseDirectory))
    fs.mkdirSync(baseDirectory, { recursive: true });

  if (customDirectory !== null) {
    if (!fs.existsSync(customDirectory)) {
      console.error(`Custom overrides directory ${customDirectory} does not exist`);
      process.exit(1);
    }

    const files = fs.readdirSync(customDirectory);

    for (const file of files) {
      const filePath = path.join(customDirectory, file);
      const fileStat = fs.statSync(filePath);

      if (fileStat.isDirectory()) {
        console.warn(`Skipping directory ${file} in custom (not supported)`);
        continue;
      }

      // Besides the prefix helping us discern between overrides and files to copy, it also prevents conflicts with the wiki pages (since none of them start with _)
      if (file.startsWith('_')) {
        fs.copyFileSync(filePath, path.join(baseDirectory, file));
        continue;
      }

      const fileContent = fs.readFileSync(filePath, { encoding: 'utf-8' });
      const pageName = file.replace(/\.lua$/, '');
      writer.addOverride(pageName, fileContent);
    }
  }

  console.log('Collecting all pages...');
  let collect_start = performance.now();

  const pageIndexes = await scrapeAndCollect(pageListScraper);

  console.log(`Took ${Math.floor((performance.now() - collect_start) / 100) / 10}s!\n`);

  console.log('Scraping all pages...');
  let scrape_start = performance.now();

  let cur = 0;
  let queue: Promise<any>[] = [];
  for (const pageIndex of pageIndexes) {
    const pageMarkupScraper = new WikiPageMarkupScraper(`${baseUrl}/${pageIndex.address}?format=text`);

    const indexForThis = cur++;
    pageMarkupScraper.on('scraped', (url, pageMarkups) => {
      if (pageMarkups.length === 0)
        return;

      // Generate file names
      let fileName = pageIndex.address;
      let moduleName = fileName;

      if (fileName.includes('.') || fileName.includes(':') || fileName.includes('/')) {
        [moduleName, fileName] = fileName.split(/[:.\/]/, 2);
      }

      fileName = fileName.replace(/[^a-z0-9]/gi, '_').toLowerCase();

      // Make sure modules like Entity and ENTITY are placed in the same file.
      moduleName = moduleName.toLowerCase();

      // Special cases for library and hook pages
      if (moduleName.endsWith("(library)")) moduleName = moduleName.substring(0, moduleName.length - 9);
      if (moduleName.endsWith("_hooks")) moduleName = moduleName.substring(0, moduleName.length - 6);

      const moduleFile = path.join(baseDirectory, moduleName);

      // Write Lua API docs
      writer.writePages(pageMarkups, path.join(baseDirectory, `${moduleName}.lua`), indexForThis);

      // Write JSON data
      if (!fs.existsSync(moduleFile))
        fs.mkdirSync(moduleFile, { recursive: true });

      const json = JSON.stringify(pageMarkups, null, 2);
      fs.writeFileSync(path.join(baseDirectory, moduleName, `${fileName}.json`), json);
    });

    queue.push(pageMarkupScraper.scrape());

    if (queue.length > 20) {
      const results = await Promise.allSettled(queue);
      for (const result of results) {
        if (result.status === "rejected") console.warn("Failed to scrape a page!", result.reason);
      }
      queue = [];
    }
  }

  // Await any after the loop exits
  const results = await Promise.allSettled(queue);
  for (const result of results) {
    if (result.status === "rejected") console.warn("Failed to scrape a page!", result.reason);
  }

  console.log(`Took ${Math.floor((performance.now() - scrape_start) / 100) / 10}s!`);

  writer.writeToDisk();

  console.log(`Done with scraping! You can find the output in ${baseDirectory}`);
}

startScrape().catch((err) => {
  console.error(err);
  process.exit(1);
});
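
For reference, the naming logic inside the `scraped` handler above (splitting a wiki page address on `.`, `:` or `/`, lowercasing, and trimming the `(library)` and `_hooks` suffixes) decides which `.lua` and `.json` files a page lands in. Below is a small self-contained sketch of that transformation; the page addresses in the comments are illustrative examples, not scraped data:

// Standalone sketch of the file/module naming logic from cli-scraper.ts above.
function targetPaths(address: string, baseDirectory: string) {
  let fileName = address;
  let moduleName = fileName;

  // Split "module:member", "module.member" or "module/member" addresses in two.
  if (fileName.includes('.') || fileName.includes(':') || fileName.includes('/')) {
    [moduleName, fileName] = fileName.split(/[:.\/]/, 2);
  }

  fileName = fileName.replace(/[^a-z0-9]/gi, '_').toLowerCase();
  moduleName = moduleName.toLowerCase();

  if (moduleName.endsWith("(library)")) moduleName = moduleName.substring(0, moduleName.length - 9);
  if (moduleName.endsWith("_hooks")) moduleName = moduleName.substring(0, moduleName.length - 6);

  return {
    lua: `${baseDirectory}/${moduleName}.lua`,
    json: `${baseDirectory}/${moduleName}/${fileName}.json`,
  };
}

// Hypothetical page addresses, for illustration only:
// targetPaths('Entity:GetPos', './output')  -> { lua: './output/entity.lua',  json: './output/entity/getpos.json' }
// targetPaths('string.format', './output')  -> { lua: './output/string.lua',  json: './output/string/format.json' }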