• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

graphty-org / graphty-monorepo / 20661584252

02 Jan 2026 03:50PM UTC coverage: 77.924% (+7.3%) from 70.62%
20661584252

push

github

apowers313
ci: fix flakey performance test

13438 of 17822 branches covered (75.4%)

Branch coverage included in aggregate %.

41247 of 52355 relevant lines covered (78.78%)

145534.85 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

76.85
/graphty-element/src/data/CSVDataSource.ts
1
import Papa from "papaparse";
15!
2

3
import { AdHocData } from "../config";
4
import { type CSVVariant, type CSVVariantInfo, detectCSVVariant } from "./csv-variant-detection.js";
15✔
5
import { BaseDataSourceConfig, DataSource, DataSourceChunk } from "./DataSource.js";
15✔
6

7
export interface CSVDataSourceConfig extends BaseDataSourceConfig {
    /** Field delimiter used when parsing; defaults to ",". */
    delimiter?: string;
    variant?: CSVVariant; // Allow explicit variant override
    /** Column holding the edge source ID (overrides the variant's default). */
    sourceColumn?: string;
    /** Column holding the edge target ID (overrides the variant's default). */
    targetColumn?: string;
    /** Column holding the node ID (overrides the variant's default). */
    idColumn?: string;
    // For paired files
    /** Node CSV as an in-memory File (pair with edgeFile). */
    nodeFile?: File;
    /** Edge CSV as an in-memory File (pair with nodeFile). */
    edgeFile?: File;
    /** URL of the node CSV (pair with edgeURL). */
    nodeURL?: string;
    /** URL of the edge CSV (pair with nodeURL). */
    edgeURL?: string;
}
19

20
/**
21
 * Data source for loading graph data from CSV files.
22
 * Supports edge lists, adjacency lists, and paired node/edge files.
23
 */
24
export class CSVDataSource extends DataSource {
    // Identifier used to select this data-source implementation.
    static readonly type = "csv";

    // Effective configuration: user-supplied values merged over defaults
    // (see constructor).
    private config: CSVDataSourceConfig;
28

29
    /**
30
     * Creates a new CSVDataSource instance.
31
     * @param config - Configuration options for CSV parsing and data loading
32
     */
33
    constructor(config: CSVDataSourceConfig) {
        super(config.errorLimit ?? 100, config.chunkSize);
        // Merge user-supplied options over the built-in defaults;
        // anything present in `config` wins.
        const defaults: Partial<CSVDataSourceConfig> = {
            delimiter: ",",
            chunkSize: 1000,
            errorLimit: 100,
        };
        this.config = { ...defaults, ...config };
    }
49✔
42

43
    /** Exposes the merged configuration to the base class. */
    protected getConfig(): BaseDataSourceConfig {
        const { config } = this;
        return config;
    }
42✔
46

47
    /**
48
     * Fetches and parses CSV data into graph chunks.
49
     * Automatically detects CSV variant (edge list, adjacency list, or paired files).
50
     * @yields DataSourceChunk objects containing parsed nodes and edges
51
     */
52
    async *sourceFetchData(): AsyncGenerator<DataSourceChunk, void, unknown> {
        // Paired node/edge files bypass single-file variant handling entirely.
        if (this.config.nodeFile || this.config.edgeFile || this.config.nodeURL || this.config.edgeURL) {
            yield* this.parsePairedFiles();
            return;
        }

        // Get CSV content
        const csvContent = await this.getContent();

        // Detect or use explicit variant
        let variantInfo: CSVVariantInfo;
        if (this.config.variant) {
            // User specified a variant - get defaults for that variant type
            const variantDefaults: Record<string, Partial<CSVVariantInfo>> = {
                neo4j: {
                    hasHeaders: false,
                    labelColumn: ":LABEL",
                    typeColumn: ":TYPE",
                },
                "adjacency-list": { hasHeaders: false },
                "node-list": { hasHeaders: true },
                "edge-list": {
                    hasHeaders: true,
                    sourceColumn: "source",
                    targetColumn: "target",
                },
                gephi: {
                    hasHeaders: true,
                    sourceColumn: "Source",
                    targetColumn: "Target",
                    typeColumn: "Type",
                    labelColumn: "Label",
                },
                cytoscape: {
                    hasHeaders: true,
                    sourceColumn: "source",
                    targetColumn: "target",
                    interactionColumn: "interaction",
                },
                generic: { hasHeaders: true },
            };

            const defaults = variantDefaults[this.config.variant] ?? { hasHeaders: true };
            variantInfo = {
                variant: this.config.variant,
                hasHeaders: defaults.hasHeaders ?? true,
                delimiter: this.config.delimiter ?? ",",
                // User-configured columns win over the variant's defaults.
                sourceColumn: this.config.sourceColumn ?? defaults.sourceColumn,
                targetColumn: this.config.targetColumn ?? defaults.targetColumn,
                idColumn: this.config.idColumn ?? defaults.idColumn,
                labelColumn: defaults.labelColumn,
                typeColumn: defaults.typeColumn,
                interactionColumn: defaults.interactionColumn,
            };
        } else {
            // Auto-detect: parse only the first row to discover the headers.
            // (Previously this preview parse ran even when a variant was
            // explicitly configured, and detectCSVVariant was invoked twice.)
            const previewResult = Papa.parse(csvContent, {
                header: true,
                preview: 1,
                delimiter: this.config.delimiter,
                dynamicTyping: true,
                transformHeader: (header) => header.trim(),
            });
            const headers = previewResult.meta.fields ?? [];

            const detected = detectCSVVariant(headers);
            variantInfo = {
                ...detected,
                // Preserve user-specified delimiter if provided
                delimiter: this.config.delimiter ?? detected.delimiter,
            };
        }

        // Parse full file
        // Neo4j format has multiple header rows, so we parse without headers
        const useHeaders = variantInfo.variant === "neo4j" ? false : variantInfo.hasHeaders;
        const fullParse = Papa.parse(csvContent, {
            header: useHeaders,
            delimiter: variantInfo.delimiter,
            dynamicTyping: true,
            skipEmptyLines: true,
            transformHeader: (header) => header.trim(),
        });

        if (fullParse.errors.length > 0) {
            // Collect parsing errors but continue if possible
            for (const error of fullParse.errors) {
                const canContinue = this.errorAggregator.addError({
                    message: `CSV parsing error: ${error.message}`,
                    line: error.row,
                    category: "parse-error",
                });

                if (!canContinue) {
                    throw new Error(`Too many CSV parsing errors (${this.errorAggregator.getErrorCount()}), aborting`);
                }
            }
        }

        // Route to appropriate parser based on variant
        switch (variantInfo.variant) {
            case "neo4j":
                yield* this.parseNeo4jFormat(fullParse.data as string[][]);
                break;
            case "gephi":
                yield* this.parseGephiFormat(fullParse.data as Record<string, unknown>[], variantInfo);
                break;
            case "cytoscape":
                yield* this.parseCytoscapeFormat(fullParse.data as Record<string, unknown>[], variantInfo);
                break;
            case "adjacency-list":
                yield* this.parseAdjacencyList(fullParse.data as string[][]);
                break;
            case "node-list":
                yield* this.parseNodeList(fullParse.data as Record<string, unknown>[]);
                break;
            case "edge-list":
            case "generic":
            default:
                yield* this.parseEdgeList(fullParse.data as Record<string, unknown>[]);
                break;
        }
    }
49✔
178

179
    /**
180
     * Create an edge from CSV row data
181
     * Returns null if source or target is missing (and logs error)
182
     * @param src - Source node ID (will be converted to string)
183
     * @param dst - Target node ID (will be converted to string)
184
     * @param row - Full row data for additional properties
185
     * @param sourceColName - Name of source column (for error messages)
186
     * @param targetColName - Name of target column (for error messages)
187
     * @param rowIndex - Row index (for error messages)
188
     * @returns Edge data object or null if invalid
189
     */
190
    private createEdge(
        src: unknown,
        dst: unknown,
        row: Record<string, unknown>,
        sourceColName: string,
        targetColName: string,
        rowIndex: number,
    ): AdHocData | null {
        // Validate source and target exist (0 and false are valid IDs,
        // so only null/undefined/empty-string are rejected)
        if (src === null || src === undefined || src === "") {
            this.errorAggregator.addError({
                message: `Missing source in row ${rowIndex} (column: ${sourceColName})`,
                line: rowIndex,
                category: "missing-data",
                field: "source",
            });
            return null;
        }

        if (dst === null || dst === undefined || dst === "") {
            this.errorAggregator.addError({
                message: `Missing target in row ${rowIndex} (column: ${targetColName})`,
                line: rowIndex,
                category: "missing-data",
                field: "target",
            });
            return null;
        }

        // Convert to strings (CSV parsers may return numbers/booleans)
        const srcStr = typeof src === "string" || typeof src === "number" ? String(src) : JSON.stringify(src);
        const dstStr = typeof dst === "string" || typeof dst === "number" ? String(dst) : JSON.stringify(dst);

        // Create edge with all row properties except source/target columns
        // (they're now in src/dst)
        const edge: Record<string, unknown> = {
            src: srcStr,
            dst: dstStr,
        };

        // Copy the row's own properties except the source/target columns.
        // Object.entries (rather than for..in) avoids picking up inherited
        // enumerable properties from a polluted Object.prototype.
        for (const [key, value] of Object.entries(row)) {
            if (key !== sourceColName && key !== targetColName) {
                edge[key] = value;
            }
        }

        return edge as AdHocData;
    }
5,494✔
239

240
    private *parseEdgeList(rows: Record<string, unknown>[]): Generator<DataSourceChunk, void, unknown> {
21✔
241
        const edges: unknown[] = [];
16✔
242
        const nodeIds = new Set<string>();
16✔
243
        // Default to lowercase column names for generic edge lists
244
        const sourceCol = this.config.sourceColumn ?? "source";
16✔
245
        const targetCol = this.config.targetColumn ?? "target";
16✔
246

247
        for (let i = 0; i < rows.length; i++) {
16✔
248
            try {
5,043✔
249
                const row = rows[i];
5,043✔
250
                const edge = this.createEdge(row[sourceCol], row[targetCol], row, sourceCol, targetCol, i + 1);
5,043✔
251

252
                if (edge) {
5,043✔
253
                    // Track unique node IDs
254
                    nodeIds.add(edge.src as string);
5,035✔
255
                    nodeIds.add(edge.dst as string);
5,035✔
256

257
                    edges.push(edge);
5,035✔
258

259
                    // Yield chunk when full
260
                    if (edges.length >= this.chunkSize) {
5,035!
261
                        yield { nodes: [] as AdHocData[], edges: edges.splice(0, this.chunkSize) as AdHocData[] };
5✔
262
                    }
5✔
263
                }
5,035✔
264
            } catch (error) {
5,043!
265
                const canContinue = this.errorAggregator.addError({
×
266
                    message: `Failed to parse row ${i + 1}: ${error instanceof Error ? error.message : String(error)}`,
×
267
                    line: i,
×
268
                    category: "parse-error",
×
269
                });
×
270

271
                if (!canContinue) {
×
272
                    throw new Error(`Too many errors (${this.errorAggregator.getErrorCount()}), aborting parse`);
×
273
                }
×
274
            }
×
275
        }
5,043✔
276

277
        // Create nodes from unique IDs and yield final chunk
278
        const nodes: unknown[] = Array.from(nodeIds).map((id) => ({ id }));
16✔
279

280
        // Always yield final chunk to ensure nodes are included
281
        // (edges may have been yielded in earlier chunks, but nodes are collected at the end)
282
        yield { nodes: nodes as AdHocData[], edges: edges as AdHocData[] };
16✔
283
    }
16✔
284

285
    private *parseNodeList(rows: Record<string, unknown>[]): Generator<DataSourceChunk, void, unknown> {
21✔
286
        const nodes: unknown[] = [];
2✔
287

288
        for (let i = 0; i < rows.length; i++) {
2✔
289
            try {
11✔
290
                const row = rows[i];
11✔
291

292
                // Try to find ID
293
                const id = row.id ?? row.Id ?? row.ID ?? String(i);
11!
294

295
                const node: Record<string, unknown> = {
11✔
296
                    id,
11✔
297
                    ...row,
11✔
298
                };
11✔
299

300
                nodes.push(node);
11✔
301

302
                // Yield chunk when full
303
                if (nodes.length >= this.chunkSize) {
11!
304
                    yield {
×
305
                        nodes: nodes.splice(0, this.chunkSize) as AdHocData[],
×
306
                        edges: [] as AdHocData[],
×
307
                    };
×
308
                }
×
309
            } catch (error) {
11!
310
                const canContinue = this.errorAggregator.addError({
×
311
                    message: `Failed to parse row ${i + 1}: ${error instanceof Error ? error.message : String(error)}`,
×
312
                    line: i,
×
313
                    category: "parse-error",
×
314
                });
×
315

316
                if (!canContinue) {
×
317
                    throw new Error(`Too many errors (${this.errorAggregator.getErrorCount()}), aborting parse`);
×
318
                }
×
319
            }
×
320
        }
11✔
321

322
        // Yield remaining nodes
323
        if (nodes.length > 0) {
2✔
324
            yield { nodes: nodes as AdHocData[], edges: [] as AdHocData[] };
2✔
325
        }
2✔
326
    }
2✔
327

328
    /**
     * Parses Neo4j admin-import style CSV, which concatenates several
     * sections: a header row, its data rows, another header row, and so on.
     * Header rows are recognized by Neo4j's reserved column names
     * (a column ending in ":ID", or ":START_ID"/":END_ID"); whichever of
     * those appear decides whether the following data rows are nodes or
     * edges.
     * @param rows - Raw parsed rows (parsed without header mapping)
     * @yields DataSourceChunk objects in chunkSize batches, plus a final
     * chunk with any remainder
     */
    private *parseNeo4jFormat(rows: string[][]): Generator<DataSourceChunk, void, unknown> {
        // Neo4j format has multiple sections with headers
        // Format: header row, data rows, header row, data rows, etc.
        const nodes: unknown[] = [];
        const edges: unknown[] = [];

        // State carried across rows: the current section's headers and
        // whether that section describes nodes or edges.
        let currentHeaders: string[] = [];
        let isNodeSection = false;
        let isEdgeSection = false;

        for (const row of rows) {
            if (row.length === 0) {
                continue;
            }

            // Check if this is a header row by looking for Neo4j special columns
            // (typeof guard because dynamicTyping may yield non-string cells)
            const hasIdColumn = row.some((col) => typeof col === "string" && col.endsWith(":ID"));
            const hasStartEnd = row.some((col) => col === ":START_ID" || col === ":END_ID");

            if (hasIdColumn || hasStartEnd) {
                // This is a header row
                currentHeaders = row.map((h) => h.trim());
                isNodeSection = hasIdColumn && !hasStartEnd;
                isEdgeSection = hasStartEnd;
                continue;
            }

            // Process data row based on current section
            // (rows seen before any header row are ignored: both flags false)
            if (isNodeSection) {
                const node: Record<string, unknown> = {};
                // Zip cells with headers; cells beyond the header count are dropped.
                for (let i = 0; i < Math.min(row.length, currentHeaders.length); i++) {
                    const header = currentHeaders[i];
                    const value = row[i];

                    if (header.endsWith(":ID")) {
                        node.id = value;
                    } else if (header === ":LABEL") {
                        node.label = value;
                    } else if (!header.startsWith(":")) {
                        // Plain property column; other reserved ":" columns are skipped
                        node[header] = value;
                    }
                }

                // Nodes without an ID are silently dropped.
                if (node.id) {
                    nodes.push(node);
                }
            } else if (isEdgeSection) {
                const edge: Record<string, unknown> = {};
                for (let i = 0; i < Math.min(row.length, currentHeaders.length); i++) {
                    const header = currentHeaders[i];
                    const value = row[i];

                    if (header === ":START_ID") {
                        edge.src = value;
                    } else if (header === ":END_ID") {
                        edge.dst = value;
                    } else if (header === ":TYPE") {
                        edge.type = value;
                    } else if (!header.startsWith(":")) {
                        edge[header] = value;
                    }
                }

                // Edges missing either endpoint are silently dropped.
                if (edge.src && edge.dst) {
                    edges.push(edge);
                }
            }

            // Yield in chunks
            if (nodes.length >= this.chunkSize) {
                yield {
                    nodes: nodes.splice(0, this.chunkSize) as AdHocData[],
                    edges: [],
                };
            }

            if (edges.length >= this.chunkSize) {
                yield {
                    nodes: [],
                    edges: edges.splice(0, this.chunkSize) as AdHocData[],
                };
            }
        }

        // Yield remaining
        if (nodes.length > 0 || edges.length > 0) {
            yield { nodes: nodes as AdHocData[], edges: edges as AdHocData[] };
        }
    }
3✔
417

418
    private *parseGephiFormat(
21✔
419
        rows: Record<string, unknown>[],
11✔
420
        info: CSVVariantInfo,
11✔
421
    ): Generator<DataSourceChunk, void, unknown> {
11✔
422
        // Gephi uses capitalized column names: Source, Target, Type, Id, Label, Weight
423
        const edges: unknown[] = [];
11✔
424
        const nodeIds = new Set<string>();
11✔
425
        const sourceCol = info.sourceColumn ?? "Source";
11!
426
        const targetCol = info.targetColumn ?? "Target";
11!
427

428
        for (let i = 0; i < rows.length; i++) {
11✔
429
            try {
425✔
430
                const row = rows[i];
425✔
431
                const edge = this.createEdge(row[sourceCol], row[targetCol], row, sourceCol, targetCol, i + 1);
425✔
432

433
                if (edge) {
425✔
434
                    // Track unique node IDs
435
                    nodeIds.add(edge.src as string);
420✔
436
                    nodeIds.add(edge.dst as string);
420✔
437

438
                    edges.push(edge);
420✔
439

440
                    // Yield chunk when full
441
                    if (edges.length >= this.chunkSize) {
420!
442
                        yield {
×
443
                            nodes: [] as AdHocData[],
×
444
                            edges: edges.splice(0, this.chunkSize) as AdHocData[],
×
445
                        };
×
446
                    }
×
447
                }
420✔
448
            } catch (error) {
425!
449
                const canContinue = this.errorAggregator.addError({
×
450
                    message: `Failed to parse row ${i + 1}: ${error instanceof Error ? error.message : String(error)}`,
×
451
                    line: i,
×
452
                    category: "parse-error",
×
453
                });
×
454

455
                if (!canContinue) {
×
456
                    throw new Error(`Too many errors (${this.errorAggregator.getErrorCount()}), aborting parse`);
×
457
                }
×
458
            }
×
459
        }
425✔
460

461
        // Create nodes from unique IDs and yield final chunk
462
        const nodes: unknown[] = Array.from(nodeIds).map((id) => ({ id }));
11✔
463
        yield { nodes: nodes as AdHocData[], edges: edges as AdHocData[] };
11✔
464
    }
11✔
465

466
    private *parseCytoscapeFormat(
21✔
467
        rows: Record<string, unknown>[],
3✔
468
        info: CSVVariantInfo,
3✔
469
    ): Generator<DataSourceChunk, void, unknown> {
3✔
470
        // Cytoscape has an 'interaction' column for edge type
471
        const edges: unknown[] = [];
3✔
472
        const nodeIds = new Set<string>();
3✔
473
        const sourceCol = info.sourceColumn ?? "source";
3!
474
        const targetCol = info.targetColumn ?? "target";
3!
475

476
        for (let i = 0; i < rows.length; i++) {
3✔
477
            try {
9✔
478
                const row = rows[i];
9✔
479
                const edge = this.createEdge(row[sourceCol], row[targetCol], row, sourceCol, targetCol, i + 1);
9✔
480

481
                if (edge) {
9✔
482
                    // Track unique node IDs
483
                    nodeIds.add(edge.src as string);
9✔
484
                    nodeIds.add(edge.dst as string);
9✔
485

486
                    edges.push(edge);
9✔
487

488
                    if (edges.length >= this.chunkSize) {
9!
489
                        yield {
×
490
                            nodes: [] as AdHocData[],
×
491
                            edges: edges.splice(0, this.chunkSize) as AdHocData[],
×
492
                        };
×
493
                    }
×
494
                }
9✔
495
            } catch (error) {
9!
496
                const canContinue = this.errorAggregator.addError({
×
497
                    message: `Failed to parse row ${i + 1}: ${error instanceof Error ? error.message : String(error)}`,
×
498
                    line: i,
×
499
                    category: "parse-error",
×
500
                });
×
501

502
                if (!canContinue) {
×
503
                    throw new Error(`Too many errors (${this.errorAggregator.getErrorCount()}), aborting parse`);
×
504
                }
×
505
            }
×
506
        }
9✔
507

508
        const nodes: unknown[] = Array.from(nodeIds).map((id) => ({ id }));
3✔
509
        yield { nodes: nodes as AdHocData[], edges: edges as AdHocData[] };
3✔
510
    }
3✔
511

512
    private *parseAdjacencyList(rows: string[][]): Generator<DataSourceChunk, void, unknown> {
        // Format: each row is [node, neighbor1, neighbor2, ...]
        // Can optionally have weights: node neighbor1:weight1 neighbor2:weight2
        const edges: unknown[] = [];
        const nodeIds = new Set<string>();

        // The file is parsed with dynamicTyping, so purely numeric cells
        // arrive as numbers despite the string[][] type; normalize before
        // any string operation (previously `neighbor.includes` could throw
        // and numeric node "0" was skipped by a truthiness check).
        const asString = (cell: unknown): string => (typeof cell === "string" ? cell : String(cell));

        for (let rowIndex = 0; rowIndex < rows.length; rowIndex++) {
            const row = rows[rowIndex];
            if (row.length < 2) {
                continue;
            }

            const sourceNode = asString(row[0]);
            nodeIds.add(sourceNode);

            // Process neighbors
            for (let j = 1; j < row.length; j++) {
                const rawNeighbor = row[j];
                // Explicit empty check: a node literally named "0" is valid.
                if (rawNeighbor === null || rawNeighbor === undefined || rawNeighbor === "") {
                    continue;
                }

                const neighbor = asString(rawNeighbor);

                // Check for weight notation: neighbor:weight
                let targetNode = neighbor;
                const rowData: Record<string, unknown> = {};

                if (neighbor.includes(":")) {
                    const parts = neighbor.split(":");
                    targetNode = parts[0];
                    const weight = parseFloat(parts[1]);
                    if (!isNaN(weight)) {
                        rowData.weight = weight;
                    }
                }

                const edge = this.createEdge(sourceNode, targetNode, rowData, "source", "target", rowIndex + 1);

                if (edge) {
                    nodeIds.add(edge.src as string);
                    nodeIds.add(edge.dst as string);

                    edges.push(edge);

                    // Flush a full batch of edges.
                    if (edges.length >= this.chunkSize) {
                        yield {
                            nodes: [] as AdHocData[],
                            edges: edges.splice(0, this.chunkSize) as AdHocData[],
                        };
                    }
                }
            }
        }

        // Create nodes for all unique node IDs
        const nodes = Array.from(nodeIds).map((id) => ({ id }));
        yield { nodes: nodes as unknown as AdHocData[], edges: edges as AdHocData[] };
    }
6✔
569

570
    private async *parsePairedFiles(): AsyncGenerator<DataSourceChunk, void, unknown> {
        // Validate that both node and edge sources are configured up front.
        const hasNodeSource = !!(this.config.nodeURL ?? this.config.nodeFile);
        const hasEdgeSource = !!(this.config.edgeURL ?? this.config.edgeFile);

        if (!hasNodeSource || !hasEdgeSource) {
            throw new Error(
                "parsePairedFiles requires both node and edge sources. " +
                    "Provide either (nodeURL + edgeURL) or (nodeFile + edgeFile).",
            );
        }

        // Shared loader for both halves of the pair: read from the File when
        // given, otherwise fetch the URL, then parse with headers.
        // (The node and edge paths were previously duplicated verbatim.)
        const loadRows = async (
            file: File | undefined,
            url: string | undefined,
        ): Promise<Record<string, unknown>[]> => {
            const content = file ? await file.text() : await (await this.fetchWithRetry(url ?? "")).text();

            const parsed = Papa.parse(content, {
                header: true,
                dynamicTyping: true,
                skipEmptyLines: true,
                transformHeader: (header) => header.trim(),
            });

            return parsed.data as Record<string, unknown>[];
        };

        // Load and parse node file; accept common ID column spellings.
        const nodes: unknown[] = [];
        for (const row of await loadRows(this.config.nodeFile, this.config.nodeURL)) {
            nodes.push({
                id: row.id ?? row.Id ?? row.ID,
                ...row,
            });
        }

        // Load and parse edge file; accept common source/target spellings.
        const edges: unknown[] = [];
        for (const row of await loadRows(this.config.edgeFile, this.config.edgeURL)) {
            edges.push({
                src: row.source ?? row.src ?? row.Source,
                dst: row.target ?? row.dst ?? row.Target,
                ...row,
            });
        }

        // Yield data in chunks using inherited helper
        yield* this.chunkData(nodes as AdHocData[], edges as AdHocData[]);
    }
7✔
630
}
21✔
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc