• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

pysat / pysatSpaceWeather / 5014980996

pending completion
5014980996

Pull #116

github

GitHub
Merge 68bcfd489 into 0c2adccf8
Pull Request #116: RC v0.0.10

19 of 19 new or added lines in 5 files covered. (100.0%)

1809 of 2035 relevant lines covered (88.89%)

7.11 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

65.94
/pysatSpaceWeather/instruments/sw_f107.py
1
# -*- coding: utf-8 -*-
2
"""Supports F10.7 index values. Downloads data from LASP and the SWPC.
8✔
3

4
Properties
5
----------
6
platform
7
    'sw'
8
name
9
    'f107'
10
tag
11
    - 'historic' LASP F10.7 data (downloads by month, loads by day)
12
    - 'prelim' Preliminary SWPC daily solar indices
13
    - 'daily' Daily SWPC solar indices (contains last 30 days)
14
    - 'forecast' Grab forecast data from SWPC (next 3 days)
15
    - '45day' 45-Day Forecast data from the Air Force
16

17
Examples
18
--------
19
Download and load all of the historic F10.7 data.  Note that it will not
20
stop on the current date, but a point in the past when post-processing has
21
been successfully completed.
22
::
23

24
    f107 = pysat.Instrument('sw', 'f107', tag='historic')
25
    f107.download(start=f107.lasp_stime, stop=f107.today())
26
    f107.load(date=f107.lasp_stime, end_date=f107.today())
27

28

29
Note
30
----
31
The forecast data is stored by generation date, where each file contains the
32
forecast for the next three days. Forecast data downloads are only supported
33
for the current day. When loading forecast data, the date specified with the
34
load command is the date the forecast was generated. The data loaded will span
35
three days. To always ensure you are loading the most recent data, load
36
the data with tomorrow's date.
37
::
38

39
    f107 = pysat.Instrument('sw', 'f107', tag='forecast')
40
    f107.download()
41
    f107.load(date=f107.tomorrow())
42

43

44
Warnings
45
--------
46
The 'forecast' F10.7 data loads three days at a time. Loading multiple files,
47
loading multiple days, the data padding feature, and multi_file_day feature
48
available from the pysat.Instrument object is not appropriate for 'forecast'
49
data.
50

51
Like 'forecast', the '45day' forecast loads a specific period of time (45 days)
52
and subsequent files contain overlapping data.  Thus, loading multiple files,
53
loading multiple days, the data padding feature, and multi_file_day feature
54
available from the pysat.Instrument object is not appropriate for '45day' data.
55

56
"""
57

58
import datetime as dt
8✔
59
import ftplib
8✔
60
import numpy as np
8✔
61
import os
8✔
62
import pandas as pds
8✔
63
import pysat
8✔
64
import requests
8✔
65
import sys
8✔
66
import warnings
8✔
67

68
from pysatSpaceWeather.instruments.methods import f107 as mm_f107
8✔
69
from pysatSpaceWeather.instruments.methods import general
8✔
70
from pysatSpaceWeather.instruments.methods import lisird
8✔
71

72
logger = pysat.logger

# ----------------------------------------------------------------------------
# Instrument attributes

platform = 'sw'
name = 'f107'

# Dict of supported tag strings and their descriptions
tags = {'historic': 'Daily LASP value of F10.7',
        'prelim': 'Preliminary SWPC daily solar indices',
        'daily': 'Daily SWPC solar indices (contains last 30 days)',
        'forecast': 'SWPC Forecast F107 data next (3 days)',
        '45day': 'Air Force 45-day Forecast'}

# Dict keyed by inst_id that lists supported tags for each inst_id
inst_ids = {'': [tag for tag in tags.keys()]}

# Dict keyed by inst_id that lists supported tags and a good day of test data
# generate today's date to support loading forecast data
# NOTE(review): `utcnow` returns a naive UTC datetime and is deprecated in
# Python 3.12+; switching to an aware datetime would change downstream
# comparisons, so it is left as-is here.
now = dt.datetime.utcnow()
today = dt.datetime(now.year, now.month, now.day)
tomorrow = today + pds.DateOffset(days=1)

# The LASP archive start day is also important
lasp_stime = dt.datetime(1947, 2, 14)

# ----------------------------------------------------------------------------
# Instrument test attributes

# Dates with known-good data for each tag; forecast-style tags use `tomorrow`
# because only current forecasts can be downloaded
_test_dates = {'': {'historic': dt.datetime(2009, 1, 1),
                    'prelim': dt.datetime(2009, 1, 1),
                    'daily': tomorrow,
                    'forecast': tomorrow,
                    '45day': tomorrow}}

# Tags not listed here are assumed to be True (downloadable in CI)
_test_download_ci = {'': {'prelim': False}}

# ----------------------------------------------------------------------------
# Instrument methods

# Reuse the shared preprocessing routine from this package's general methods
preprocess = general.preprocess
113

114

115
def init(self):
    """Initialize the Instrument object with instrument specific values."""

    # Assign the required pysat Instrument attributes
    self.acknowledgements = mm_f107.acknowledgements(self.tag)
    self.references = mm_f107.references(self.tag)
    logger.info(self.acknowledgements)

    # The 'historic' tag exposes the LASP archive start time
    if self.tag == 'historic':
        self.lasp_stime = lasp_stime

    # These tags load more than just F10.7 data, and that behaviour will be
    # deprecated in v0.1.0.  Map each affected tag to its warning message
    # and raise a DeprecationWarning when applicable.
    multi_set_msg = "".join(["Upcoming structural changes will prevent ",
                             "Instruments from loading multiple data sets in",
                             " one Instrument. In version 0.1.0+ the SSN, ",
                             "solar flare, and solar mean field data will be",
                             " accessable from the `sw_ssn`, `sw_flare`, ",
                             "and `sw_sbfield` Instruments."])
    dep_msgs = {'daily': multi_set_msg,
                'prelim': multi_set_msg,
                '45day': "".join(["Upcoming structural changes will prevent ",
                                  "Instruments from loading multiple data ",
                                  "sets in one Instrument. In version 0.1.0+",
                                  " the Ap will be accessable from the ",
                                  "`sw_ap` Instrument."])}

    if self.tag in dep_msgs:
        warnings.warn(dep_msgs[self.tag], DeprecationWarning, stacklevel=2)

    return
148

149

150
def clean(self):
    """Clean the F10.7 data.

    Note
    ----
    No cleaning is necessary for this data set, so this routine exists only
    to satisfy the pysat Instrument interface and performs no work.

    """
    return
154

155

156
# ----------------------------------------------------------------------------
157
# Instrument functions
158

159

160
def load(fnames, tag='', inst_id=''):
    """Load F10.7 index files.

    Parameters
    ----------
    fnames : pandas.Series
        Series of filenames.
    tag : str
        Instrument tag. (default='')
    inst_id : str
        Instrument ID, not used. (default='')

    Returns
    -------
    data : pandas.DataFrame
        Object containing satellite data.
    meta : pysat.Meta
        Object containing metadata such as column names and units.

    See Also
    --------
    pysat.instruments.methods.general.load_csv_data

    Note
    ----
    Called by pysat. Not intended for direct use by user.

    """

    # Get the desired file dates and file names from the daily indexed list.
    # For 'historic' and 'prelim' tags, `list_files` appends a '_YYYY-MM-DD'
    # day stamp to each monthly/quarterly file; strip it here to recover the
    # unique physical files and remember which days were requested.
    file_dates = list()
    if tag in ['historic', 'prelim']:
        unique_files = list()
        for fname in fnames:
            file_dates.append(dt.datetime.strptime(fname[-10:], '%Y-%m-%d'))
            if fname[0:-11] not in unique_files:
                unique_files.append(fname[0:-11])
        fnames = unique_files

    # Load the CSV data files
    data = pysat.instruments.methods.general.load_csv_data(
        fnames, read_csv_kwargs={"index_col": 0, "parse_dates": True})

    # If there is a date range, downselect here to only the requested days
    # (the loaded files may span more time than was asked for)
    if len(file_dates) > 0:
        idx, = np.where((data.index >= min(file_dates))
                        & (data.index < max(file_dates) + dt.timedelta(days=1)))
        data = data.iloc[idx, :]

    # Initialize the metadata
    meta = pysat.Meta()
    meta['f107'] = {meta.labels.units: 'SFU',
                    meta.labels.name: 'F10.7 cm solar index',
                    meta.labels.notes: '',
                    meta.labels.desc:
                    'F10.7 cm radio flux in Solar Flux Units (SFU)',
                    meta.labels.fill_val: np.nan,
                    meta.labels.min_val: 0,
                    meta.labels.max_val: np.inf}

    if tag == '45day':
        # The 45-day forecast also carries a daily Ap column
        meta['ap'] = {meta.labels.units: '',
                      meta.labels.name: 'Daily Ap index',
                      meta.labels.notes: '',
                      meta.labels.desc: 'Daily average of 3-h ap indices',
                      meta.labels.fill_val: np.nan,
                      meta.labels.min_val: 0,
                      meta.labels.max_val: 400}
    elif tag == 'historic':
        # LASP updated file format in June, 2022. Minimize impact downstream by
        # continuing use of `f107` as primary data product.
        if 'f107_adjusted' in data.columns:
            # There may be a mix of old and new data formats.
            if 'f107' in data.columns:
                # Only fill NaN in the `f107` and `f107_adjusted` columns
                # for consistency across both data sets
                data.loc[np.isnan(data['f107']), 'f107'] = data.loc[
                    np.isnan(data['f107']), 'f107_adjusted']

                data.loc[np.isnan(data['f107_adjusted']),
                         'f107_adjusted'] = data.loc[
                             np.isnan(data['f107_adjusted']), 'f107']
            else:
                data['f107'] = data['f107_adjusted']

            # Add metadata.  Each pair of assignments below first copies the
            # full label set from an existing entry, then assigns a dict with
            # only `desc` — this relies on pysat.Meta updating just the
            # supplied label rather than replacing the entry (NOTE(review):
            # confirm against the pysat.Meta `__setitem__` semantics).
            meta['f107_observed'] = meta['f107']
            raw_str = 'Raw F10.7 cm radio flux in Solar Flux Units (SFU)'
            meta['f107_observed'] = {meta.labels.desc: raw_str}

            meta['f107_adjusted'] = meta['f107_observed']
            norm_str = ''.join(['F10.7 cm radio flux in Solar Flux Units (SFU)',
                                ' normalized to 1-AU'])
            meta['f107_adjusted'] = {meta.labels.desc: norm_str}

            meta['f107'] = {
                meta.labels.desc: meta['f107_adjusted', meta.labels.desc]}

    elif tag == 'daily' or tag == 'prelim':
        # Update the allowed types for the fill value, since
        # 'goes_bgd_flux' below uses a string fill value ('*')
        meta.labels.label_type['fill_val'] = (float, int, np.float64,
                                              np.int64, str)

        meta['ssn'] = {meta.labels.units: '',
                       meta.labels.name: 'Sunspot Number',
                       meta.labels.notes: '',
                       meta.labels.desc: 'SESC Sunspot Number',
                       meta.labels.fill_val: -999,
                       meta.labels.min_val: 0,
                       meta.labels.max_val: np.inf}
        meta['ss_area'] = {meta.labels.units: '10$^-6$ Solar Hemisphere',
                           meta.labels.name: 'Sunspot Area',
                           meta.labels.notes: '',
                           meta.labels.desc:
                           ''.join(['Sunspot Area in Millionths of the ',
                                    'Visible Hemisphere']),
                           meta.labels.fill_val: -999,
                           meta.labels.min_val: 0,
                           meta.labels.max_val: 1.0e6}
        meta['new_reg'] = {meta.labels.units: '',
                           meta.labels.name: 'New Regions',
                           meta.labels.notes: '',
                           meta.labels.desc: 'New active solar regions',
                           meta.labels.fill_val: -999,
                           meta.labels.min_val: 0,
                           meta.labels.max_val: np.inf}
        # NOTE(review): 'Standford' in the desc below is a typo for
        # 'Stanford', but fixing it would change the output metadata values.
        meta['smf'] = {meta.labels.units: 'G',
                       meta.labels.name: 'Solar Mean Field',
                       meta.labels.notes: '',
                       meta.labels.desc: 'Standford Solar Mean Field',
                       meta.labels.fill_val: -999,
                       meta.labels.min_val: 0,
                       meta.labels.max_val: np.inf}
        meta['goes_bgd_flux'] = {meta.labels.units: 'W/m^2',
                                 meta.labels.name: 'X-ray Background Flux',
                                 meta.labels.notes: '',
                                 meta.labels.desc:
                                 'GOES15 X-ray Background Flux',
                                 meta.labels.fill_val: '*',
                                 meta.labels.min_val: -np.inf,
                                 meta.labels.max_val: np.inf}
        meta['c_flare'] = {meta.labels.units: '',
                           meta.labels.name: 'C X-Ray Flares',
                           meta.labels.notes: '',
                           meta.labels.desc: 'C-class X-Ray Flares',
                           meta.labels.fill_val: -1,
                           meta.labels.min_val: 0,
                           meta.labels.max_val: 9}
        meta['m_flare'] = {meta.labels.units: '',
                           meta.labels.name: 'M X-Ray Flares',
                           meta.labels.notes: '',
                           meta.labels.desc: 'M-class X-Ray Flares',
                           meta.labels.fill_val: -1,
                           meta.labels.min_val: 0,
                           meta.labels.max_val: 9}
        meta['x_flare'] = {meta.labels.units: '',
                           meta.labels.name: 'X X-Ray Flares',
                           meta.labels.notes: '',
                           meta.labels.desc: 'X-class X-Ray Flares',
                           meta.labels.fill_val: -1,
                           meta.labels.min_val: 0,
                           meta.labels.max_val: 9}
        meta['o1_flare'] = {meta.labels.units: '',
                            meta.labels.name: '1 Optical Flares',
                            meta.labels.notes: '',
                            meta.labels.desc: '1-class Optical Flares',
                            meta.labels.fill_val: -1,
                            meta.labels.min_val: 0,
                            meta.labels.max_val: 9}
        meta['o2_flare'] = {meta.labels.units: '',
                            meta.labels.name: '2 Optical Flares',
                            meta.labels.notes: '',
                            meta.labels.desc: '2-class Optical Flares',
                            meta.labels.fill_val: -1,
                            meta.labels.min_val: 0,
                            meta.labels.max_val: 9}
        meta['o3_flare'] = {meta.labels.units: '',
                            meta.labels.name: '3 Optical Flares',
                            meta.labels.notes: '',
                            meta.labels.desc: '3-class Optical Flares',
                            meta.labels.fill_val: -1,
                            meta.labels.min_val: 0,
                            meta.labels.max_val: 9}

    return data, meta
345

346

347
def list_files(tag='', inst_id='', data_path='', format_str=None):
    """List local F10.7 data files.

    Parameters
    ----------
    tag : str
        Instrument tag, accepts any value from `tags`. (default='')
    inst_id : str
        Instrument ID, not used. (default='')
    data_path : str
        Path to data directory. (default='')
    format_str : str or NoneType
        User specified file format.  If None is specified, the default
        formats associated with the supplied tags are used. (default=None)

    Returns
    -------
    out_files : pysat._files.Files
        A class containing the verified available files

    Note
    ----
    Called by pysat. Not intended for direct use by user.

    """

    if tag == 'historic':
        # Files are by month, going to add date to monthly filename for
        # each day of the month. The load routine will load a month of
        # data and use the appended date to select out appropriate data.
        if format_str is None:
            format_str = 'f107_monthly_{year:04d}-{month:02d}.txt'
        out_files = pysat.Files.from_os(data_path=data_path,
                                        format_str=format_str)
        if not out_files.empty:
            # Extend the last monthly file to the end of its month, then
            # forward-fill to a daily cadence and stamp each day's date onto
            # the filename (the stamp is stripped again in `load`)
            out_files.loc[out_files.index[-1] + pds.DateOffset(months=1)
                          - pds.DateOffset(days=1)] = out_files.iloc[-1]
            out_files = out_files.asfreq('D', 'pad')
            out_files = out_files + '_' + out_files.index.strftime(
                '%Y-%m-%d')

    elif tag == 'prelim':
        # Files are by year (and quarter)
        if format_str is None:
            format_str = ''.join(['f107_prelim_{year:04d}_{month:02d}',
                                  '_v{version:01d}.txt'])
        out_files = pysat.Files.from_os(data_path=data_path,
                                        format_str=format_str)

        if not out_files.empty:
            # Set each file's valid length at a 1-day resolution
            orig_files = out_files.sort_index().copy()
            new_files = list()

            for orig in orig_files.items():
                # Version determines each file's valid length: v2 files span
                # a year, v1 files span a quarter
                version = np.int64(orig[1].split("_v")[1][0])
                doff = pds.DateOffset(years=1) if version == 2 \
                    else pds.DateOffset(months=3)
                istart = orig[0]
                iend = istart + doff - pds.DateOffset(days=1)

                # Ensure the end time does not extend past the number of
                # possible days included based on the file's download time.
                # NOTE(review): `os.path.getctime` is platform-dependent
                # (creation time on Windows, inode-change time on Unix) —
                # verify this is acceptable for the supported platforms.
                fname = os.path.join(data_path, orig[1])
                dend = dt.datetime.utcfromtimestamp(os.path.getctime(fname))
                dend = dend - pds.DateOffset(days=1)
                if dend < iend:
                    iend = dend

                # Pad the original file index; `out_files` is mutated inside
                # this loop, so statement order here matters
                out_files.loc[iend] = orig[1]
                out_files = out_files.sort_index()

                # Save the files at a daily cadence over the desired period
                new_files.append(out_files.loc[istart:
                                               iend].asfreq('D', 'pad'))
            # Add the newly indexed files to the file output
            out_files = pds.concat(new_files, sort=True)
            out_files = out_files.dropna()
            out_files = out_files.sort_index()
            out_files = out_files + '_' + out_files.index.strftime('%Y-%m-%d')

    elif tag in ['daily', 'forecast', '45day']:
        format_str = ''.join(['f107_', tag,
                              '_{year:04d}-{month:02d}-{day:02d}.txt'])
        out_files = pysat.Files.from_os(data_path=data_path,
                                        format_str=format_str)

        # Pad list of files data to include most recent file under tomorrow
        if not out_files.empty:
            pds_off = pds.DateOffset(days=1)
            # The statement below is deliberately repeated: each call extends
            # the newest file's index by one more day, for two extra days in
            # total (today and tomorrow)
            out_files.loc[out_files.index[-1] + pds_off] = out_files.values[-1]
            out_files.loc[out_files.index[-1] + pds_off] = out_files.values[-1]

    return out_files
443

444

445
def download(date_array, tag, inst_id, data_path, update_files=False):
    """Download F107 index data from the appropriate repository.

    Parameters
    ----------
    date_array : array-like
        Sequence of dates for which files will be downloaded.
    tag : str
        Denotes type of file to load.
    inst_id : str
        Specifies the satellite ID for a constellation.
    data_path : str
        Path to data directory.
    update_files : bool
        Re-download data for files that already exist if True (default=False)

    Raises
    ------
    IOError
        If a problem is encountered connecting to the gateway or retrieving
        data from the repository.

    Warnings
    --------
    Only able to download current forecast data, not archived forecasts.

    Note
    ----
    Called by pysat. Not intended for direct use by user.

    """
    # Download standard F107 data
    if tag == 'historic':
        # Test the date array, updating it if necessary to a monthly cadence
        # anchored on the first day of the starting month
        if date_array.freq != 'MS':
            date_array = pysat.utils.time.create_date_range(
                dt.datetime(date_array[0].year, date_array[0].month, 1),
                date_array[-1], freq='MS')

        # Download from LASP, by month
        freq = pds.DateOffset(months=1, seconds=-1)
        lisird.download(date_array, data_path, 'f107_monthly_', '%Y-%m',
                        'noaa_radio_flux', freq, update_files,
                        {'f107_adjusted': -99999.0, 'f107_observed': -99999.0})

    elif tag == 'prelim':
        ftp = ftplib.FTP('ftp.swpc.noaa.gov')  # Connect to host, default port
        ftp.login()  # User anonymous, passwd anonymous
        ftp.cwd('/pub/indices/old_indices')

        # Remote filenames that turned out not to exist; kept so each name is
        # only attempted once
        bad_fname = list()

        # Get the local files, to ensure that the version 1 files are
        # downloaded again if more data has been added
        local_files = list_files(tag, inst_id, data_path)

        # Cut the date from the end of the local files
        for i, lfile in enumerate(local_files):
            local_files[i] = lfile[:-11]

        # To avoid downloading multiple files, cycle dates based on file length
        dl_date = date_array[0]
        while dl_date <= date_array[-1]:
            # The file name changes, depending on how recent the requested
            # data is: an annual v2 file or a quarterly v1 file
            qnum = (dl_date.month - 1) // 3 + 1  # Integer floor division
            qmonth = (qnum - 1) * 3 + 1
            quar = 'Q{:d}_'.format(qnum)
            fnames = ['{:04d}{:s}DSD.txt'.format(dl_date.year, ss)
                      for ss in ['_', quar]]
            versions = ["01_v2", "{:02d}_v1".format(qmonth)]
            vend = [dt.datetime(dl_date.year, 12, 31),
                    dt.datetime(dl_date.year, qmonth, 1)
                    + pds.DateOffset(months=3) - pds.DateOffset(days=1)]
            downloaded = False
            rewritten = False

            # Attempt the download(s)
            for iname, fname in enumerate(fnames):
                # Test to see if we already tried this filename
                if fname in bad_fname:
                    continue

                local_fname = fname
                saved_fname = os.path.join(data_path, local_fname)
                ofile = '_'.join(['f107', 'prelim',
                                  '{:04d}'.format(dl_date.year),
                                  '{:s}.txt'.format(versions[iname])])
                outfile = os.path.join(data_path, ofile)

                if os.path.isfile(outfile):
                    downloaded = True

                    # Check the date to see if this should be rewritten
                    checkfile = os.path.split(outfile)[-1]
                    has_file = local_files == checkfile
                    if np.any(has_file):
                        if has_file[has_file].index[-1] < vend[iname]:
                            # This file will be updated again, but only attempt
                            # to do so if enough time has passed from the
                            # last time it was downloaded
                            yesterday = today - pds.DateOffset(days=1)
                            if has_file[has_file].index[-1] < yesterday:
                                rewritten = True
                else:
                    # The file does not exist, if it can be downloaded, it
                    # should be 'rewritten'
                    rewritten = True

                # Attempt to download if the file does not exist or if the
                # file has been updated
                if rewritten or not downloaded:
                    try:
                        sys.stdout.flush()

                        # Use a context manager so the local file handle is
                        # closed whether or not the retrieval succeeds (the
                        # previous open(...).write pattern leaked the handle,
                        # leaving it open when `os.remove` ran below)
                        with open(saved_fname, 'wb') as fout:
                            ftp.retrbinary('RETR ' + fname, fout.write)
                        downloaded = True
                        logger.info(' '.join(('Downloaded file for ',
                                              dl_date.strftime('%x'))))

                    except ftplib.error_perm as exception:
                        # Could not fetch, so cannot rewrite
                        rewritten = False

                        # Test for an error
                        if str(exception.args[0]).split(" ", 1)[0] != '550':
                            raise IOError(exception)
                        else:
                            # file isn't actually there, try the next name
                            os.remove(saved_fname)

                            # Save this so we don't try again
                            # Because there are two possible filenames for
                            # each time, it's ok if one isn't there.  We just
                            # don't want to keep looking for it.
                            bad_fname.append(fname)

                # If the first file worked, don't try again
                if downloaded:
                    break

            if not downloaded:
                logger.info(' '.join(('File not available for',
                                      dl_date.strftime('%x'))))
            elif rewritten:
                with open(saved_fname, 'r') as fprelim:
                    lines = fprelim.read()

                mm_f107.rewrite_daily_file(dl_date.year, outfile, lines)
                os.remove(saved_fname)

            # Cycle to the next date.  NOTE(review): `iname` is the loop
            # variable left over from the filename loop above; if the annual
            # file was downloaded it advances a full year, otherwise a
            # quarter -- confirm this is the intended cadence.
            dl_date = vend[iname] + pds.DateOffset(days=1)

        # Close connection after downloading all dates
        ftp.close()

    elif tag == 'daily':
        logger.info('This routine can only download the latest 30 day file')

        # Set the download webpage
        furl = 'https://services.swpc.noaa.gov/text/daily-solar-indices.txt'
        req = requests.get(furl)

        # Save the output, reformatted by the shared daily-file writer
        data_file = 'f107_daily_{:s}.txt'.format(today.strftime('%Y-%m-%d'))
        outfile = os.path.join(data_path, data_file)
        mm_f107.rewrite_daily_file(today.year, outfile, req.text)

    elif tag == 'forecast':
        logger.info(' '.join(('This routine can only download the current',
                              'forecast, not archived forecasts')))
        # Set the download webpage
        furl = ''.join(('https://services.swpc.noaa.gov/text/',
                        '3-day-solar-geomag-predictions.txt'))
        req = requests.get(furl)

        # Parse text to get the date the prediction was generated
        date_str = req.text.split(':Issued: ')[-1].split(' UTC')[0]
        dl_date = dt.datetime.strptime(date_str, '%Y %b %d %H%M')

        # Get starting date of the forecasts
        raw_data = req.text.split(':Prediction_dates:')[-1]
        forecast_date = dt.datetime.strptime(raw_data[3:14], '%Y %b %d')

        # Set the times for output data
        times = pds.date_range(forecast_date, periods=3, freq='1D')

        # String data is the forecast value for the next three days, read
        # from fixed character columns of the 10cm_flux line
        raw_data = req.text.split('10cm_flux:')[-1]
        raw_data = raw_data.split('\n')[1]
        val1 = np.int64(raw_data[24:27])
        val2 = np.int64(raw_data[38:41])
        val3 = np.int64(raw_data[52:])

        # Put data into nicer DataFrame
        data = pds.DataFrame([val1, val2, val3], index=times, columns=['f107'])

        # Write out as a file, named for the forecast generation date
        data_file = 'f107_forecast_{:s}.txt'.format(
            dl_date.strftime('%Y-%m-%d'))
        data.to_csv(os.path.join(data_path, data_file), header=True)

    elif tag == '45day':
        logger.info(' '.join(('This routine can only download the current',
                              'forecast, not archived forecasts')))

        # Set the download webpage
        furl = 'https://services.swpc.noaa.gov/text/45-day-ap-forecast.txt'
        req = requests.get(furl)

        # Parse text to get the date the prediction was generated
        date_str = req.text.split(':Issued: ')[-1].split(' UTC')[0]
        dl_date = dt.datetime.strptime(date_str, '%Y %b %d %H%M')

        # Get to the forecast data
        raw_data = req.text.split('45-DAY AP FORECAST')[-1]

        # Grab AP part
        raw_ap = raw_data.split('45-DAY F10.7 CM FLUX FORECAST')[0]
        raw_ap = raw_ap.split('\n')[1:-1]

        # Get the F107
        raw_f107 = raw_data.split('45-DAY F10.7 CM FLUX FORECAST')[-1]
        raw_f107 = raw_f107.split('\n')[1:-4]

        # Parse the AP data
        ap_times, ap = mm_f107.parse_45day_block(raw_ap)

        # Parse the F10.7 data
        f107_times, f107 = mm_f107.parse_45day_block(raw_f107)

        # Collect into DataFrame
        data = pds.DataFrame(f107, index=f107_times, columns=['f107'])
        data['ap'] = ap

        # Write out as a file, named for the forecast generation date
        data_file = 'f107_45day_{:s}.txt'.format(dl_date.strftime('%Y-%m-%d'))
        data.to_csv(os.path.join(data_path, data_file), header=True)

    return
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2025 Coveralls, Inc