HicServices / RDMP / 6245535001

20 Sep 2023 07:44AM UTC coverage: 57.013%. First build 6245535001.

push | github | web-flow

8.1.0 Release (#1628)

* Bump Newtonsoft.Json from 13.0.1 to 13.0.2

Bumps [Newtonsoft.Json](https://github.com/JamesNK/Newtonsoft.Json) from 13.0.1 to 13.0.2.
- [Release notes](https://github.com/JamesNK/Newtonsoft.Json/releases)
- [Commits](https://github.com/JamesNK/Newtonsoft.Json/compare/13.0.1...13.0.2)

---
updated-dependencies:
- dependency-name: Newtonsoft.Json
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>

* Bump NLog from 5.0.5 to 5.1.0

Bumps [NLog](https://github.com/NLog/NLog) from 5.0.5 to 5.1.0.
- [Release notes](https://github.com/NLog/NLog/releases)
- [Changelog](https://github.com/NLog/NLog/blob/dev/CHANGELOG.md)
- [Commits](https://github.com/NLog/NLog/compare/v5.0.5...v5.1.0)

---
updated-dependencies:
- dependency-name: NLog
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>

* Bump NLog from 5.0.5 to 5.1.0

* Fix -r flag - should have been --results-directory all along

* Bump Newtonsoft.Json from 13.0.1 to 13.0.2

* Bump YamlDotNet from 12.0.2 to 12.1.0

Bumps [YamlDotNet](https://github.com/aaubry/YamlDotNet) from 12.0.2 to 12.1.0.
- [Release notes](https://github.com/aaubry/YamlDotNet/releases)
- [Commits](https://github.com/aaubry/YamlDotNet/compare/v12.0.2...v12.1.0)

---
updated-dependencies:
- dependency-name: YamlDotNet
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>

* Bump Moq from 4.18.2 to 4.18.3

Bumps [Moq](https://github.com/moq/moq4) from 4.18.2 to 4.18.3.
- [Release notes](https://github.com/moq/moq4/releases)
- [Changelog](https://github.com/moq/moq4/blob/main/CHANGELOG.md)
- [Commits](https://github.com/moq/moq4/compare/v4.18.2...v4.18.3)

---
updated-dependencies:
- dependency-name: Moq
... (continued)

10732 of 20257 branches covered (52.98%)

Branch coverage included in aggregate %.

48141 of 48141 new or added lines in 1086 files covered. (100.0%)

30685 of 52388 relevant lines covered (58.57%)

7387.88 hits per line
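The headline figure of 57.013% is consistent with pooling line and branch hits into a single ratio, which appears to be what "Branch coverage included in aggregate %" means (an inference from the numbers on this page, not a documented formula): (30685 + 10732) / (52388 + 20257) = 41417 / 72645 ≈ 57.013%.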

Source File (81.36% covered):
/Rdmp.Core/DataExport/DataExtraction/Pipeline/Destinations/ExtractionDestination.cs
// Copyright (c) The University of Dundee 2018-2019
// This file is part of the Research Data Management Platform (RDMP).
// RDMP is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
// RDMP is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
// You should have received a copy of the GNU General Public License along with RDMP. If not, see <https://www.gnu.org/licenses/>.

using System;
using System.Data;
using System.Diagnostics;
using System.IO;
using FAnsi.Discovery;
using Microsoft.Data.SqlClient;
using Rdmp.Core.Curation;
using Rdmp.Core.Curation.Data;
using Rdmp.Core.DataExport.Data;
using Rdmp.Core.DataExport.DataExtraction.Commands;
using Rdmp.Core.DataExport.DataExtraction.UserPicks;
using Rdmp.Core.DataExport.DataRelease.Pipeline;
using Rdmp.Core.DataExport.DataRelease.Potential;
using Rdmp.Core.DataFlowPipeline;
using Rdmp.Core.DataFlowPipeline.Requirements;
using Rdmp.Core.Logging;
using Rdmp.Core.MapsDirectlyToDatabaseTable;
using Rdmp.Core.Repositories;
using Rdmp.Core.ReusableLibraryCode.Checks;
using Rdmp.Core.ReusableLibraryCode.DataAccess;
using Rdmp.Core.ReusableLibraryCode.Progress;

namespace Rdmp.Core.DataExport.DataExtraction.Pipeline.Destinations;

public abstract class ExtractionDestination : IExecuteDatasetExtractionDestination, IPipelineRequirement<IProject>
{
    //user configurable properties

    [DemandsInitialization(
        "Naming of flat files is usually based on Catalogue.Name, if this is true then the Catalogue.Acronym will be used instead",
        defaultValue: false)]
    public bool UseAcronymForFileNaming { get; set; }

    [DemandsInitialization(
        "The date format to output all datetime fields in e.g. dd/MM/yyyy for uk format yyyy-MM-dd for something more machine processable, see https://msdn.microsoft.com/en-us/library/8kb3ddd4(v=vs.110).aspx",
        DemandType.Unspecified, "yyyy-MM-dd", Mandatory = true)]
    public string DateFormat { get; set; }

    [DemandsInitialization(
        "If this is true, the dataset/globals extraction folder will be wiped clean before extracting the dataset. Useful if you suspect there are spurious files in the folder",
        defaultValue: false)]
    public bool CleanExtractionFolderBeforeExtraction { get; set; }

    public bool GeneratesFiles { get; }

    [DemandsInitialization(@"Overrides the extraction sub directory of datasets as they are extracted
         $c - Configuration Name (e.g. 'Cases')
         $i - Configuration ID (e.g. 459)
         $d - Dataset name (e.g. 'Prescribing')
         $a - Dataset acronym (e.g. 'Presc')
         $n - Dataset ID (e.g. 459)

e.g. /$i/$a")]
    public string ExtractionSubdirectoryPattern { get; set; }

    //PreInitialize fields
    protected IExtractCommand _request;
    protected DataLoadInfo _dataLoadInfo;
    protected IProject _project;

    //state variables
    protected bool haveOpened;
    private bool haveWrittenBundleContents;
    private Stopwatch stopwatch = new();

    public TableLoadInfo TableLoadInfo { get; private set; }

    public DirectoryInfo DirectoryPopulated { get; private set; }

    public int SeparatorsStrippedOut { get; set; }

    public int LinesWritten { get; protected set; }
    public string OutputFile { get; protected set; } = string.Empty;

    public ExtractionDestination(bool generatesFiles)
    {
        GeneratesFiles = generatesFiles;
    }

    #region PreInitialize

    public void PreInitialize(IExtractCommand request, IDataLoadEventListener listener)
    {
        _request = request;

        if (_request == ExtractDatasetCommand.EmptyCommand)
        {
            listener.OnNotify(this,
                new NotifyEventArgs(ProgressEventType.Information,
                    "Request is ExtractDatasetCommand.EmptyCommand, checking will not be carried out"));
            return;
        }

        LinesWritten = 0;

        DirectoryPopulated = request.GetExtractionDirectory();

        PreInitializeImpl(request, listener);
    }

    protected abstract void PreInitializeImpl(IExtractCommand request, IDataLoadEventListener listener);


    public virtual void PreInitialize(DataLoadInfo value, IDataLoadEventListener listener)
    {
        _dataLoadInfo = value;
    }

    public virtual void PreInitialize(IProject value, IDataLoadEventListener listener)
    {
        _project = value;
    }

    #endregion

    /// <inheritdoc/>
    public virtual string GetFilename()
    {
        var filename = _request.ToString();

        if (_request is IExtractDatasetCommand datasetCommand && UseAcronymForFileNaming)
        {
            filename = datasetCommand.Catalogue.Acronym;
            if (string.IsNullOrWhiteSpace(filename))
                throw new Exception(
                    $"Catalogue '{datasetCommand.Catalogue}' does not have an Acronym but UseAcronymForFileNaming is true");
        }

        return filename;
    }

    /// <summary>
    /// Extracts the rows in <paramref name="toProcess"/> to the extraction destination
    /// </summary>
    /// <param name="toProcess"></param>
    /// <param name="job"></param>
    /// <param name="cancellationToken"></param>
    /// <returns></returns>
    public virtual DataTable ProcessPipelineData(DataTable toProcess, IDataLoadEventListener job,
        GracefulCancellationToken cancellationToken)
    {
        _request.ElevateState(ExtractCommandState.WritingToFile);

        if (!haveWrittenBundleContents && _request is ExtractDatasetCommand extractDatasetCommand)
        {
            WriteBundleContents(extractDatasetCommand.DatasetBundle, job, cancellationToken);
            haveWrittenBundleContents = true;
        }

        if (_request is ExtractGlobalsCommand extractGlobalsCommand)
        {
            ExtractGlobals(extractGlobalsCommand, job, _dataLoadInfo);
            return null;
        }

        stopwatch.Start();
        if (!haveOpened)
        {
            haveOpened = true;
            LinesWritten = 0;
            Open(toProcess, job, cancellationToken);

            //create an audit object
            TableLoadInfo = new TableLoadInfo(_dataLoadInfo, "", OutputFile,
                new DataSource[] { new(_request.DescribeExtractionImplementation(), DateTime.Now) }, -1);
        }

        WriteRows(toProcess, job, cancellationToken, stopwatch);

        if (TableLoadInfo.IsClosed)
            throw new Exception(
                $"TableLoadInfo was closed so could not write number of rows ({LinesWritten}) to audit object - most likely the extraction crashed?");
        TableLoadInfo.Inserts = LinesWritten;

        Flush(job, cancellationToken, stopwatch);
        stopwatch.Stop();

        return null;
    }

    #region Abstract Extraction Methods

    /// <inheritdoc/>
    public abstract string GetDestinationDescription();

    /// <summary>
    /// Called once on receiving the first batch of records, this is where you should create / open your output stream (or create a table
    /// if you are extracting to database).
    /// </summary>
    /// <param name="toProcess"></param>
    /// <param name="job"></param>
    /// <param name="cancellationToken"></param>
    protected abstract void Open(DataTable toProcess, IDataLoadEventListener job,
        GracefulCancellationToken cancellationToken);

    /// <summary>
    /// Called once per batch of records to be extracted, these should be written to the output stream you opened in <see cref="Open"/>
    /// </summary>
    /// <param name="toProcess"></param>
    /// <param name="job"></param>
    /// <param name="cancellationToken"></param>
    /// <param name="stopwatch"></param>
    protected abstract void WriteRows(DataTable toProcess, IDataLoadEventListener job,
        GracefulCancellationToken cancellationToken, Stopwatch stopwatch);

    /// <summary>
    /// Called after each batch is written, allows you to flush your stream (if required)
    /// </summary>
    /// <param name="job"></param>
    /// <param name="cancellationToken"></param>
    /// <param name="stopwatch"></param>
    protected virtual void Flush(IDataLoadEventListener job, GracefulCancellationToken cancellationToken,
        Stopwatch stopwatch)
    {
    }

    /// <inheritdoc/>
    public abstract void Dispose(IDataLoadEventListener listener, Exception pipelineFailureExceptionIfAny);

    /// <inheritdoc/>
    public abstract void Abort(IDataLoadEventListener listener);

    /// <inheritdoc/>
    public virtual void Check(ICheckNotifier notifier)
    {
        if (!string.IsNullOrWhiteSpace(ExtractionSubdirectoryPattern))
        {
            if (ExtractionSubdirectoryPattern.Contains('.'))
                notifier.OnCheckPerformed(new CheckEventArgs(
                    "ExtractionSubdirectoryPattern cannot contain dots, it must be relative e.g. $c/$d",
                    CheckResult.Fail));

            if (!ExtractionSubdirectoryPattern.Contains("$i") && !ExtractionSubdirectoryPattern.Contains("$c"))
                notifier.OnCheckPerformed(new CheckEventArgs(
                    "ExtractionSubdirectoryPattern must contain a Configuration element ($i or $c)",
                    CheckResult.Fail));

            if (!ExtractionSubdirectoryPattern.Contains("$a") && !ExtractionSubdirectoryPattern.Contains("$d") &&
                !ExtractionSubdirectoryPattern.Contains("$n"))
                notifier.OnCheckPerformed(new CheckEventArgs(
                    "ExtractionSubdirectoryPattern must contain a Dataset element ($d, $a or $n)",
                    CheckResult.Fail));
        }
    }

    #endregion


    #region Release Related Methods

    /// <inheritdoc/>
    public abstract ReleasePotential GetReleasePotential(IRDMPPlatformRepositoryServiceLocator repositoryLocator,
        ISelectedDataSets selectedDataSet);

    /// <inheritdoc/>
    public abstract GlobalReleasePotential GetGlobalReleasabilityEvaluator(
        IRDMPPlatformRepositoryServiceLocator repositoryLocator, ISupplementalExtractionResults globalResult,
        IMapsDirectlyToDatabaseTable globalToCheck);

    /// <inheritdoc/>
    public abstract FixedReleaseSource<ReleaseAudit> GetReleaseSource(ICatalogueRepository catalogueRepository);

    #endregion


    #region Bundled Content (and Globals)

    private void ExtractGlobals(ExtractGlobalsCommand request, IDataLoadEventListener listener,
        DataLoadInfo dataLoadInfo)
    {
        var globalsDirectory = GetDirectoryFor(request);
        if (CleanExtractionFolderBeforeExtraction)
        {
            globalsDirectory.Delete(true);
            globalsDirectory.Create();
        }

        foreach (var doc in request.Globals.Documents)
            request.Globals.States[doc] = TryExtractSupportingDocument(doc, globalsDirectory, listener)
                ? ExtractCommandState.Completed
                : ExtractCommandState.Crashed;

        foreach (var sql in request.Globals.SupportingSQL)
            request.Globals.States[sql] =
                TryExtractSupportingSQLTable(sql, globalsDirectory, request.Configuration, listener, dataLoadInfo)
                    ? ExtractCommandState.Completed
                    : ExtractCommandState.Crashed;
    }

    private void WriteBundleContents(IExtractableDatasetBundle datasetBundle, IDataLoadEventListener job,
        GracefulCancellationToken cancellationToken)
    {
        var rootDir = GetDirectoryFor(_request);
        var supportingSQLFolder =
            new DirectoryInfo(Path.Combine(rootDir.FullName, SupportingSQLTable.ExtractionFolderName));
        var lookupDir = rootDir.CreateSubdirectory("Lookups");

        //extract the documents
        foreach (var doc in datasetBundle.Documents)
            datasetBundle.States[doc] = TryExtractSupportingDocument(doc, rootDir, job)
                ? ExtractCommandState.Completed
                : ExtractCommandState.Crashed;

        //extract supporting SQL
        foreach (var sql in datasetBundle.SupportingSQL)
            datasetBundle.States[sql] =
                TryExtractSupportingSQLTable(sql, supportingSQLFolder, _request.Configuration, job, _dataLoadInfo)
                    ? ExtractCommandState.Completed
                    : ExtractCommandState.Crashed;

        //extract lookups
        foreach (BundledLookupTable lookup in datasetBundle.LookupTables)
            datasetBundle.States[lookup] = TryExtractLookupTable(lookup, lookupDir, job)
                ? ExtractCommandState.Completed
                : ExtractCommandState.Crashed;
    }

    public DirectoryInfo GetDirectoryFor(IExtractCommand request)
    {
        if (string.IsNullOrWhiteSpace(ExtractionSubdirectoryPattern) || request is not IExtractDatasetCommand cmd)
            return request.GetExtractionDirectory();

        var cata = cmd.SelectedDataSets.ExtractableDataSet.Catalogue;

        if (ExtractionSubdirectoryPattern.Contains("$a") && string.IsNullOrWhiteSpace(cata.Acronym))
            throw new Exception(
                $"Catalogue {cata} does not have an Acronym and ExtractionSubdirectoryPattern contains $a");

        var path = Path.Combine(cmd.Project.ExtractionDirectory,
            ExtractionSubdirectoryPattern
                .Replace("$c", QuerySyntaxHelper.MakeHeaderNameSensible(cmd.Configuration.Name))
                .Replace("$i", cmd.Configuration.ID.ToString())
                .Replace("$d", QuerySyntaxHelper.MakeHeaderNameSensible(cata.Name))
                .Replace("$a", QuerySyntaxHelper.MakeHeaderNameSensible(cata.Acronym))
                .Replace("$n", cata.ID.ToString())
        );

        var dir = new DirectoryInfo(path);
        if (!dir.Exists)
            dir.Create();

        return dir;
    }

    protected bool TryExtractLookupTable(BundledLookupTable lookup, DirectoryInfo lookupDir, IDataLoadEventListener job)
    {
        var sw = new Stopwatch();
        sw.Start();

        job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, $"About to extract lookup {lookup}"));

        try
        {
            TryExtractLookupTableImpl(lookup, lookupDir, _request.Configuration, job, out var linesWritten,
                out var destinationDescription);

            sw.Stop();
            job.OnProgress(this,
                new ProgressEventArgs($"Lookup {lookup}", new ProgressMeasurement(linesWritten, ProgressType.Records),
                    sw.Elapsed));

            //audit in the log the extraction
            var tableLoadInfo = _dataLoadInfo.CreateTableLoadInfo("", destinationDescription, new[]
            {
                new DataSource(
                    $"SELECT * FROM {lookup.TableInfo.Name}", DateTime.Now)
            }, -1);
            tableLoadInfo.Inserts = linesWritten;
            tableLoadInfo.CloseAndArchive();

            //audit in cumulative extraction results (determines release-ability of artifacts).
            if (_request is ExtractDatasetCommand command)
            {
                var result = command.CumulativeExtractionResults;
                var supplementalResult = result.AddSupplementalExtractionResult(
                    $"SELECT * FROM {lookup.TableInfo.Name}", lookup.TableInfo);
                supplementalResult.CompleteAudit(GetType(), destinationDescription, linesWritten, false, false);
            }

            return true;
        }
        catch (Exception e)
        {
            job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Error,
                $"Error occurred trying to extract lookup {lookup} on server {lookup.TableInfo.Server}", e));

            return false;
        }
    }

    /// <summary>
    /// Extracts the <paramref name="doc"/> into the supplied <paramref name="directory"/> (unless overridden to put it somewhere else)
    /// </summary>
    /// <param name="doc"></param>
    /// <param name="directory"></param>
    /// <param name="listener"></param>
    /// <returns></returns>
    protected virtual bool TryExtractSupportingDocument(SupportingDocument doc, DirectoryInfo directory,
        IDataLoadEventListener listener)
    {
        var fetcher = new SupportingDocumentsFetcher(doc);

        listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information,
            $"Preparing to copy {doc} to directory {directory.FullName}"));
        try
        {
            var outputPath = fetcher.ExtractToDirectory(directory);
            if (_request is ExtractDatasetCommand command)
            {
                var result = command.CumulativeExtractionResults;
                var supplementalResult = result.AddSupplementalExtractionResult(null, doc);
                supplementalResult.CompleteAudit(GetType(), outputPath, 0, false, false);
            }
            else
            {
                var extractGlobalsCommand = _request as ExtractGlobalsCommand;
                Debug.Assert(extractGlobalsCommand != null, "extractGlobalsCommand != null");
                var result = new SupplementalExtractionResults(
                    extractGlobalsCommand.RepositoryLocator.DataExportRepository,
                    extractGlobalsCommand.Configuration,
                    null,
                    doc);
                result.CompleteAudit(GetType(), outputPath, 0, false, false);
                extractGlobalsCommand.ExtractionResults.Add(result);
            }

            return true;
        }
        catch (Exception e)
        {
            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Error,
                $"Failed to copy file {doc} to directory {directory.FullName}", e));
            return false;
        }
    }

    protected bool TryExtractSupportingSQLTable(SupportingSQLTable sql, DirectoryInfo directory,
        IExtractionConfiguration configuration, IDataLoadEventListener listener, DataLoadInfo dataLoadInfo)
    {
        try
        {
            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information,
                $"Preparing to extract Supporting SQL {sql} to directory {directory.FullName}"));

            var sw = new Stopwatch();
            sw.Start();

            //start auditing it as a table load
            var target = Path.Combine(directory.FullName, $"{sql.Name}.csv");
            var tableLoadInfo =
                dataLoadInfo.CreateTableLoadInfo("", target, new[] { new DataSource(sql.SQL, DateTime.Now) }, -1);

            TryExtractSupportingSQLTableImpl(sql, directory, configuration, listener, out var sqlLinesWritten,
                out var description);

            sw.Stop();

            //end auditing it
            tableLoadInfo.Inserts = sqlLinesWritten;
            tableLoadInfo.CloseAndArchive();

            if (_request is ExtractDatasetCommand command)
            {
                var result = command.CumulativeExtractionResults;
                var supplementalResult = result.AddSupplementalExtractionResult(sql.SQL, sql);
                supplementalResult.CompleteAudit(GetType(), description, sqlLinesWritten, false, false);
            }
            else
            {
                var extractGlobalsCommand = _request as ExtractGlobalsCommand;
                Debug.Assert(extractGlobalsCommand != null, "extractGlobalsCommand != null");
                var result =
                    new SupplementalExtractionResults(extractGlobalsCommand.RepositoryLocator.DataExportRepository,
                        extractGlobalsCommand.Configuration,
                        sql.SQL,
                        sql);
                result.CompleteAudit(GetType(), description, sqlLinesWritten, false, false);
                extractGlobalsCommand.ExtractionResults.Add(result);
            }

            listener.OnProgress(this,
                new ProgressEventArgs($"Extract {sql}", new ProgressMeasurement(sqlLinesWritten, ProgressType.Records),
                    sw.Elapsed));
            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information,
                $"Extracted {sqlLinesWritten} records from SupportingSQL {sql} into directory {directory.FullName}"));

            return true;
        }
        catch (Exception e)
        {
            if (e is SqlException)
                listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Error,
                    $"Failed to run extraction SQL (make sure to fully specify all database/table/column objects completely):{Environment.NewLine}{sql.SQL}",
                    e));
            else
                listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Error,
                    $"Failed to extract {sql} into directory {directory.FullName}", e));

            return false;
        }
    }

    protected virtual void TryExtractSupportingSQLTableImpl(SupportingSQLTable sqlTable, DirectoryInfo directory,
        IExtractionConfiguration configuration, IDataLoadEventListener listener, out int linesWritten,
        out string destinationDescription)
    {
        var extractor = new ExtractTableVerbatim(sqlTable.GetServer(), sqlTable.SQL, sqlTable.Name, directory,
            configuration.Separator, DateFormat);
        linesWritten = extractor.DoExtraction();
        destinationDescription = extractor.OutputFilename;
    }

    protected virtual void TryExtractLookupTableImpl(BundledLookupTable lookup, DirectoryInfo lookupDir,
        IExtractionConfiguration requestConfiguration, IDataLoadEventListener listener, out int linesWritten,
        out string destinationDescription)
    {
        //extract the lookup table SQL
        var sql = lookup.GetDataTableFetchSql();

        var extractTableVerbatim = new ExtractTableVerbatim(
            lookup.TableInfo.Discover(DataAccessContext.DataExport).Database.Server,
            sql, lookup.TableInfo.GetRuntimeName(), lookupDir, _request.Configuration.Separator, DateFormat);

        linesWritten = extractTableVerbatim.DoExtraction();
        destinationDescription = extractTableVerbatim.OutputFilename;
    }

    #endregion
}
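For orientation, the sketch below shows what a minimal concrete destination built on this class could look like. It is a hypothetical TsvExtractionDestination (not part of RDMP) that writes each batch to a tab-separated file, purely to illustrate the Open / WriteRows / Flush / Dispose lifecycle that ProcessPipelineData drives; the release-potential members required by the base class are stubbed out, and the file name and layout choices are assumptions rather than RDMP behaviour.

using System;
using System.Data;
using System.Diagnostics;
using System.IO;
using System.Linq;
using Rdmp.Core.DataExport.Data;
using Rdmp.Core.DataExport.DataExtraction.Commands;
using Rdmp.Core.DataExport.DataExtraction.Pipeline.Destinations;
using Rdmp.Core.DataExport.DataRelease.Pipeline;
using Rdmp.Core.DataExport.DataRelease.Potential;
using Rdmp.Core.DataFlowPipeline;
using Rdmp.Core.MapsDirectlyToDatabaseTable;
using Rdmp.Core.Repositories;
using Rdmp.Core.ReusableLibraryCode.Progress;

// Hypothetical example only: a tab-separated-values destination used to illustrate the
// ExtractionDestination lifecycle. Not part of RDMP.
public class TsvExtractionDestination : ExtractionDestination
{
    private StreamWriter _writer;

    public TsvExtractionDestination() : base(generatesFiles: true)
    {
    }

    protected override void PreInitializeImpl(IExtractCommand request, IDataLoadEventListener listener)
    {
        // The base class has already captured _request and DirectoryPopulated; nothing extra is needed here.
    }

    public override string GetDestinationDescription() => OutputFile;

    // Called once, on the first batch: pick the output path and write a header row.
    protected override void Open(DataTable toProcess, IDataLoadEventListener job,
        GracefulCancellationToken cancellationToken)
    {
        OutputFile = Path.Combine(GetDirectoryFor(_request).FullName, $"{GetFilename()}.tsv");
        _writer = new StreamWriter(OutputFile);
        _writer.WriteLine(string.Join("\t",
            toProcess.Columns.Cast<DataColumn>().Select(c => c.ColumnName)));
    }

    // Called once per batch; LinesWritten feeds the TableLoadInfo audit maintained by the base class.
    protected override void WriteRows(DataTable toProcess, IDataLoadEventListener job,
        GracefulCancellationToken cancellationToken, Stopwatch stopwatch)
    {
        foreach (DataRow row in toProcess.Rows)
        {
            _writer.WriteLine(string.Join("\t", row.ItemArray));
            LinesWritten++;
        }
    }

    protected override void Flush(IDataLoadEventListener job, GracefulCancellationToken cancellationToken,
        Stopwatch stopwatch) => _writer?.Flush();

    public override void Dispose(IDataLoadEventListener listener, Exception pipelineFailureExceptionIfAny) =>
        _writer?.Dispose();

    public override void Abort(IDataLoadEventListener listener) => _writer?.Dispose();

    // Release-potential members are outside the scope of this sketch.
    public override ReleasePotential GetReleasePotential(IRDMPPlatformRepositoryServiceLocator repositoryLocator,
        ISelectedDataSets selectedDataSet) => throw new NotImplementedException();

    public override GlobalReleasePotential GetGlobalReleasabilityEvaluator(
        IRDMPPlatformRepositoryServiceLocator repositoryLocator, ISupplementalExtractionResults globalResult,
        IMapsDirectlyToDatabaseTable globalToCheck) => throw new NotImplementedException();

    public override FixedReleaseSource<ReleaseAudit> GetReleaseSource(ICatalogueRepository catalogueRepository) =>
        throw new NotImplementedException();
}

Note that the audit wiring (TableLoadInfo, cumulative extraction results, supporting documents, lookups and globals) is all handled by the abstract base class, so a subclass only needs to move the rows and keep LinesWritten and OutputFile up to date.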