
visgl / loaders.gl / 20382848403

19 Dec 2025 09:20PM UTC coverage: 35.219% (+0.1%) from 35.095%
Build 20382848403 (push, github, web-flow)
feat: Upgrade to handle ArrayBufferLike (#3271)

1190 of 2002 branches covered (59.44%). Branch coverage is included in the aggregate %.
157 of 269 new or added lines in 41 files covered (58.36%).
3 existing lines in 3 files are now uncovered.
37536 of 107957 relevant lines covered (34.77%), at 0.79 hits per line.
Source File

28.93% of lines covered in /modules/zip/src/parse-zip/zip-composition.ts. Only the top-level import and declaration lines of this file are hit; every function body below is uncovered.
import {
  concatenateArrayBuffers,
  path,
  NodeFilesystem,
  NodeFile,
  toArrayBuffer
} from '@loaders.gl/loader-utils';
import {ZipEoCDRecord, generateEoCD, parseEoCDRecord, updateEoCD} from './end-of-central-directory';
import {CRC32Hash} from '@loaders.gl/crypto';
import {generateLocalHeader} from './local-file-header';
import {generateCDHeader} from './cd-file-header';
import {fetchFile} from '@loaders.gl/core';
import {readRange} from './readable-file-utils';

/**
 * cuts the CD and EoCD records off the end of a zip file
 * @param provider zip file
 * @returns tuple with three values: CD body, EoCD record body, parsed EoCD information
 */
async function cutTheTailOff(
  provider: NodeFile
): Promise<[ArrayBuffer, ArrayBuffer, ZipEoCDRecord]> {
  // define where the body ends
  const oldEoCDinfo = await parseEoCDRecord(provider);
  const oldCDStartOffset = oldEoCDinfo.cdStartOffset;
  const providerSize = (await provider.stat()).bigsize;

  // define cd length
  const oldCDLength = Number(
    oldEoCDinfo.offsets.zip64EoCDOffset
      ? oldEoCDinfo.offsets.zip64EoCDOffset - oldCDStartOffset
      : oldEoCDinfo.offsets.zipEoCDOffset - oldCDStartOffset
  );

  // cut off everything except the archive body
  const zipEnding = await readRange(provider, oldCDStartOffset, providerSize);
  await provider.truncate(Number(oldCDStartOffset));

  // divide CD body and EoCD record
  const oldCDBody = zipEnding.slice(0, oldCDLength);
  const eocdBody = zipEnding.slice(oldCDLength, zipEnding.byteLength);

  return [oldCDBody, eocdBody, oldEoCDinfo];
}

/**
 * generates CD and local headers for the file
 * @param fileName name of the file
 * @param fileToAdd buffer with the file content
 * @param localFileHeaderOffset offset of the file's local header
 * @returns tuple with two values: local header concatenated with the file body, and the CD header
 */
async function generateFileHeaders(
  fileName: string,
  fileToAdd: ArrayBuffer,
  localFileHeaderOffset: bigint
): Promise<[Uint8Array, Uint8Array]> {
  // generate CRC32 of the content
  const newFileCRC322 = parseInt(await new CRC32Hash().hash(fileToAdd, 'hex'), 16);

  // generate local header for the file
  const newFileLocalHeader = generateLocalHeader({
    crc32: newFileCRC322,
    fileName,
    length: fileToAdd.byteLength
  });

  // generate the file's CD header
  const newFileCDHeader = generateCDHeader({
    crc32: newFileCRC322,
    fileName,
    offset: localFileHeaderOffset,
    length: fileToAdd.byteLength
  });
  return [
    new Uint8Array(concatenateArrayBuffers(newFileLocalHeader, fileToAdd)),
    new Uint8Array(newFileCDHeader)
  ];
}

/**
 * adds one file at the end of the archive
 * @param zipUrl path to the zip file
 * @param fileToAdd new file body
 * @param fileName new file name
 */
export async function addOneFile(zipUrl: string, fileToAdd: ArrayBuffer, fileName: string) {
  // init file handler
  const provider = new NodeFile(zipUrl, 'a+');

  const [oldCDBody, eocdBody, oldEoCDinfo] = await cutTheTailOff(provider);

  let currentOffset = (await provider.stat()).bigsize;

  // remember the new file's local header start offset
  const newFileOffset = currentOffset;

  const [localPart, cdHeaderPart] = await generateFileHeaders(fileName, fileToAdd, newFileOffset);

  // write down the file local header
  await provider.append(localPart);
  currentOffset += BigInt(localPart.byteLength);

  // add the file CD header to the CD
  const newCDBody = concatenateArrayBuffers(oldCDBody, cdHeaderPart);

  // remember the CD start offset
  const newCDStartOffset = currentOffset;

  // write down the new CD
  await provider.append(new Uint8Array(newCDBody));
  currentOffset += BigInt(newCDBody.byteLength);

  // remember where the EoCD starts
  const eocdOffset = currentOffset;

  const updatedEoCD = updateEoCD(
    eocdBody,
    oldEoCDinfo.offsets,
    newCDStartOffset,
    eocdOffset,
    oldEoCDinfo.cdRecordsNumber + 1n
  );

  await provider.append(updatedEoCD);
  currentOffset += BigInt(updatedEoCD.byteLength);
}

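// Usage sketch (not part of the original file, added for illustration): appends a
// hypothetical metadata entry to an existing archive on disk. The archive path and
// entry name are made-up examples; addOneFile() rewrites the central directory and
// EoCD record of the file in place.
async function exampleAddOneFile(): Promise<void> {
  const body = new TextEncoder().encode('{"version": 1}').buffer as ArrayBuffer;
  await addOneFile('./example.zip', body, 'metadata.json');
}
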
/**
 * creates a zip archive with no compression
 * @note This is a node specific function that works on files
 * @param inputPath path where files for the archive are stored
 * @param outputPath path where the zip archive will be placed
 * @param createAdditionalData optional callback that builds one extra file from the collected file list
 */
export async function createZip(
  inputPath: string,
  outputPath: string,
  createAdditionalData?: (
    fileList: {fileName: string; localHeaderOffset: bigint}[]
  ) => Promise<{path: string; file: ArrayBuffer}>
) {
  const fileIterator = getFileIterator(inputPath);

  const resFile = new NodeFile(outputPath, 'w');
  const fileList: {fileName: string; localHeaderOffset: bigint}[] = [];

  const cdArray: ArrayBuffer[] = [];
  for await (const file of fileIterator) {
    await addFile(file, resFile, cdArray, fileList);
  }
  if (createAdditionalData) {
    const additionaldata = await createAdditionalData(fileList);
    await addFile(additionaldata, resFile, cdArray);
  }
  const cdOffset = (await resFile.stat()).bigsize;
  const cd = concatenateArrayBuffers(...cdArray);
  await resFile.append(new Uint8Array(cd));
  const eoCDStart = (await resFile.stat()).bigsize;
  await resFile.append(
    new Uint8Array(
      generateEoCD({recordsNumber: cdArray.length, cdSize: cd.byteLength, cdOffset, eoCDStart})
    )
  );
}

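// Usage sketch (not part of the original file, added for illustration): packs a folder
// into an uncompressed archive and uses the optional createAdditionalData callback to
// append one extra, hypothetical index entry built from the collected local-header offsets.
async function exampleCreateZip(): Promise<void> {
  await createZip('./input-folder', './output.zip', async (fileList) => {
    const index = fileList.map(({fileName, localHeaderOffset}) => ({
      fileName,
      localHeaderOffset: localHeaderOffset.toString()
    }));
    const file = new TextEncoder().encode(JSON.stringify(index)).buffer as ArrayBuffer;
    return {path: 'index.json', file};
  });
}
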
/**
 * Adds a file to the zip parts
 * @param file file to add
 * @param resFile zip file body
 * @param cdArray zip file central directory
 * @param fileList list of file offsets
 */
async function addFile(
  file: {path: string; file: ArrayBuffer},
  resFile: NodeFile,
  cdArray: ArrayBuffer[],
  fileList?: {fileName: string; localHeaderOffset: bigint}[]
) {
  const size = (await resFile.stat()).bigsize;
  fileList?.push({fileName: file.path, localHeaderOffset: size});
  const [localPart, cdHeaderPart] = await generateFileHeaders(file.path, file.file, size);
  await resFile.append(localPart);
  cdArray.push(toArrayBuffer(cdHeaderPart));
}

/**
 * creates an iterator providing a buffer with the file content and a path for every file in the input folder
 * @param inputPath path to the input folder
 * @returns iterator
 */
export function getFileIterator(
  inputPath: string
): AsyncIterable<{path: string; file: ArrayBuffer}> {
  async function* iterable() {
    const fileList = await getAllFiles(inputPath);
    for (const filePath of fileList) {
      const file = await (await fetchFile(path.join(inputPath, filePath))).arrayBuffer();
      yield {path: filePath, file};
    }
  }
  return iterable();
}

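// Usage sketch (not part of the original file, added for illustration): walks a
// hypothetical input folder with getFileIterator() and logs the size of each file that
// createZip() would store. The destructured `path` field is renamed to avoid shadowing
// the `path` utility imported above.
async function exampleIterateFiles(): Promise<void> {
  for await (const {path: relativePath, file} of getFileIterator('./input-folder')) {
    console.log(`${relativePath}: ${file.byteLength} bytes`);
  }
}
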
/**
 * creates a list of relative paths to all files in the provided folder
 * @param basePath path of the root folder
 * @param subfolder relative path from the root folder
 * @param fsPassed optional filesystem instance to reuse instead of creating a new one
 * @returns list of paths
 */
export async function getAllFiles(
  basePath: string,
  subfolder: string = '',
  fsPassed?: NodeFilesystem
): Promise<string[]> {
  const fs = fsPassed ? fsPassed : new NodeFilesystem({});
  const files = await fs.readdir(pathJoin(basePath, subfolder));

  const arrayOfFiles: string[] = [];

  for (const file of files) {
    const fullPath = pathJoin(basePath, subfolder, file);
    if ((await fs.stat(fullPath)).isDirectory) {
      const files = await getAllFiles(basePath, pathJoin(subfolder, file));
      arrayOfFiles.push(...files);
    } else {
      arrayOfFiles.push(pathJoin(subfolder, file));
    }
  }

  return arrayOfFiles;
}

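// Usage sketch (not part of the original file, added for illustration): lists all files
// under a hypothetical folder as paths relative to that folder, e.g. ['a.txt', 'nested/b.bin'].
async function exampleListFiles(): Promise<void> {
  const relativePaths = await getAllFiles('./input-folder');
  console.log(relativePaths);
}
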
/**
 * removes empty parts from the path array and joins the rest
 * @param paths paths to join
 * @returns joined path
 */
function pathJoin(...paths: string[]): string {
  const resPaths: string[] = paths.filter((val) => val.length);
  return path.join(...resPaths);
}