• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

pysat / pysat / 13036165242

29 Jan 2025 04:44PM UTC coverage: 97.404%. Remained the same
13036165242

push

github

web-flow
Merge pull request #1211 from pysat/control_distribution_statement

LGL: Control distribution statement

11780 of 12094 relevant lines covered (97.4%)

7.73 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

97.97
pysat/instruments/methods/testing.py
1
#!/usr/bin/env python
2
# Full license can be found in License.md
3
# Full author list can be found in .zenodo.json file
4
# DOI:10.5281/zenodo.1199703
5
#
6
# Review Status for Classified or Controlled Information by NRL
7
# -------------------------------------------------------------
8
# DISTRIBUTION STATEMENT A: Approved for public release. Distribution is
9
# unlimited.
10
# ----------------------------------------------------------------------------
11
"""Standard functions for the test instruments."""
1✔
12

13
import datetime as dt
8✔
14
import os
8✔
15

16
import numpy as np
8✔
17
import pandas as pds
8✔
18
import time
8✔
19
import warnings
8✔
20
import xarray as xr
8✔
21

22
import pysat
8✔
23
from pysat.utils import NetworkLock
8✔
24
from pysat.utils import time as putime
8✔
25

26
# Acknowledgements text shared by every test instrument; attached to
# Instrument objects by `init` below.
ackn_str = ' '.join(("Test instruments provided through the pysat project.",
                     "https://www.github.com/pysat/pysat"))

# Load up citation information
# NOTE(review): this reads `pysat.citation` at import time under a file lock,
# so importing this module performs file I/O.
with pysat.utils.NetworkLock(pysat.citation, 'r') as locked_file:
    refs = locked_file.read()
8✔
32

33

34
def init(self, test_init_kwarg=None):
    """Initialize the Instrument object with instrument specific values.

    Runs once upon instantiation.

    Shifts time index of files by 5-minutes if `mangle_file_dates`
    set to True at pysat.Instrument instantiation.

    Creates a file list for a given range if the `file_date_range`
    keyword is set at instantiation.

    Parameters
    ----------
    test_init_kwarg : any
        Testing keyword (default=None)

    """
    # Attach the shared acknowledgement and citation text to the Instrument
    self.references = refs
    self.acknowledgements = ackn_str
    pysat.logger.info(self.acknowledgements)

    # Attributes used only for unit testing
    self.test_init_kwarg = test_init_kwarg
    self.new_thing = True

    return
8✔
61

62

63
def clean(self, test_clean_kwarg=None):
    """Pass through when asked to clean a test instrument.

    Parameters
    ----------
    test_clean_kwarg : any
        Testing keyword. If these keywords contain 'logger', 'warning', or
        'error', the message entered as the value to that key will be returned
        as a logging.WARNING, UserWarning, or ValueError, respectively. If the
        'change' kwarg is set, the clean level will be changed to the specified
        value. (default=None)

    """
    # Record the keyword so unit tests can confirm it was passed through
    self.test_clean_kwarg = test_clean_kwarg

    if isinstance(test_clean_kwarg, dict):
        # Optionally override the clean level
        if 'change' in test_clean_kwarg:
            self.clean_level = test_clean_kwarg['change']

        # Emit the requested log message, warning, or exception
        if 'logger' in test_clean_kwarg:
            pysat.logger.warning(test_clean_kwarg['logger'])

        if 'warning' in test_clean_kwarg:
            warnings.warn(test_clean_kwarg['warning'], UserWarning)

        if 'error' in test_clean_kwarg:
            raise ValueError(test_clean_kwarg['error'])

    return
8✔
93

94

95
# Optional methods
def concat_data(self, new_data, **kwargs):
    """Concatenate data to self.data for extra time dimensions.

    Parameters
    ----------
    new_data : xarray.Dataset or list of such objects
        New data objects to be concatenated
    **kwargs : dict
        Optional keyword arguments passed to xr.concat

    Note
    ----
    Expects the extra time dimensions to have a variable name that starts
    with 'time', and no other dimensions to have a name that fits this format.

    """
    # Establish the time dimensions, ensuring the standard variable is included
    # whether or not it is treated as a variable
    time_dims = [self.index.name]
    time_dims.extend([var for var in self.variables if var.find('time') == 0
                      and var != self.index.name])

    # Concatenate using the appropriate method for the number of time
    # dimensions
    if len(time_dims) == 1:
        # There is only one time dimension, but other dimensions may
        # need to be adjusted
        new_data = pysat.utils.coords.expand_xarray_dims(
            new_data, self.meta, exclude_dims=time_dims)

        # Specify the dimension, if not otherwise specified
        if 'dim' not in kwargs:
            kwargs['dim'] = self.index.name

        self.data = xr.concat(new_data, **kwargs)
    else:
        # Multiple time dimensions: split each incoming Dataset by time
        # dimension, concatenate per dimension, and merge at the end.
        inners = None
        for ndata in new_data:
            # Separate into inner datasets, grouping each variable with the
            # time dimension it depends on
            inner_keys = {dim: [key for key in ndata.keys()
                                if dim in ndata[key].dims] for dim in time_dims}
            inner_dat = {dim: ndata.get(inner_keys[dim]) for dim in time_dims}

            # Add 'single_var's into 'time' dataset to keep track
            sv_keys = [val.name for val in ndata.values()
                       if 'single_var' in val.dims]
            singlevar_set = ndata.get(sv_keys)
            inner_dat[self.index.name] = xr.merge([inner_dat[self.index.name],
                                                   singlevar_set])

            # Concatenate along desired dimension with previous data
            if inners is None:
                # No previous data, assign the data separated by dimension
                inners = dict(inner_dat)
            else:
                # Concatenate with existing data
                inners = {dim: xr.concat([inners[dim], inner_dat[dim]],
                                         dim=dim) for dim in time_dims}

        # Combine all time dimensions
        if inners is not None:
            data_list = [inners[dim] for dim in time_dims]
            self.data = xr.merge(data_list)
    return
8✔
160

161

162
def preprocess(self, test_preprocess_kwarg=None):
    """Perform standard preprocessing.

    This routine is automatically applied to the Instrument object
    on every load by the pysat nanokernel (first in queue). Object
    modified in place.

    Parameters
    ----------
    test_preprocess_kwarg : any
        Testing keyword (default=None)

    """
    # Store the keyword so unit tests can confirm the value was passed through
    self.test_preprocess_kwarg = test_preprocess_kwarg
    return
8✔
178

179

180
# Utility functions
def initialize_test_meta(epoch_name, data_keys):
    """Initialize meta data for test instruments.

    This routine should be applied to test instruments at the end of the load
    routine.

    Parameters
    ----------
    epoch_name : str
        The variable name of the instrument epoch. NOTE(review): not
        currently referenced within this routine.
    data_keys : iterable of str
        The dataset keys from the instrument.

    Returns
    -------
    meta : pysat.Meta
        Metadata restricted to the variables present in `data_keys`.

    """
    # Create standard metadata for all parameters
    data_types = {'uts': float, 'mlt': float, 'slt': float, 'longitude': float,
                  'latitude': float, 'altitude': float, 'orbit_num': int,
                  'dummy1': int, 'dummy2': int, 'dummy3': int, 'dummy4': int,
                  'unicode_dummy': str, 'string_dummy': str,
                  'dummy_drifts': float, 'int8_dummy': int, 'int16_dummy': int,
                  'int32_dummy': int, 'int64_dummy': int, 'profiles': int,
                  'series_profiles': float}
    meta = pysat.Meta(data_types=data_types)
    meta['uts'] = {'units': 's', 'long_name': 'Universal Time',
                   'desc': 'Number of seconds since midnight UT',
                   'value_min': 0.0, 'value_max': 86400.0}
    meta['mlt'] = {'units': 'hours', 'long_name': 'Magnetic Local Time',
                   'value_min': 0.0, 'value_max': 24.0,
                   'desc': 'Local time at magnetic field line at equator.'}
    meta['slt'] = {'units': 'hours', 'long_name': 'Solar Local Time',
                   'value_min': 0.0, 'value_max': 24.0,
                   'desc': 'Mean solar time.',
                   'notes': 'Example of notes.'}
    meta['longitude'] = {'units': 'degrees', 'long_name': 'Longitude',
                         'value_min': 0.0, 'value_max': 360.0,
                         'desc': 'Geographic Longitude'}
    meta['latitude'] = {'units': 'degrees', 'long_name': 'Latitude',
                        'value_min': -90.0, 'value_max': 90.0,
                        'desc': 'Geographic Latitude'}
    meta['altitude'] = {'units': 'km', 'long_name': 'Altitude',
                        'value_min': 0.0, 'value_max': np.inf,
                        'desc': 'Height above mean Earth.'}
    meta['orbit_num'] = {'units': '', 'long_name': 'Orbit Number',
                         'desc': 'Orbit Number', 'value_min': 0,
                         'value_max': 25000, 'fill': -1,
                         'notes': ''.join(['Number of orbits since the start ',
                                           'of the mission. For this ',
                                           'simulation we use the number of ',
                                           '5820 second periods since the ',
                                           'start, 2008-01-01.'])}

    meta['dummy1'] = {'value_min': 0, 'value_max': 24, 'fill': -1}
    meta['dummy2'] = {'value_min': 0, 'value_max': 24, 'fill': -1}
    meta['dummy3'] = {'value_min': 0., 'value_max': 24024.}
    meta['dummy4'] = {'desc': 'Dummy variable - UTS like', 'value_min': 0.,
                      'value_max': 86400., 'fill': np.nan}

    meta['unicode_dummy'] = {'desc': 'Dummy unicode variable.', 'units': ''}
    meta['string_dummy'] = {'desc': 'Dummy string variable.', 'units': ''}

    meta['dummy_drifts'] = {'desc': 'Dummy drift values.', 'value_min': -1000.,
                            'value_max': 1000., 'fill': np.nan}

    # Add metadata for integer dummy variables
    meta_dict = {'value_min': 0, 'value_max': 2, 'fill': -1}
    var_list = ['int8_dummy', 'int16_dummy', 'int32_dummy', 'int64_dummy']
    for var in var_list:
        meta[var] = meta_dict

    # Standard metadata required for xarray
    meta['profiles'] = {'long_name': 'profiles', 'value_min': 0,
                        'value_max': 4294967295, 'fill': -1,
                        'desc': ''.join(['Testing profile multi-dimensional ',
                                         'data indexed by time.']),
                        'notes': ''.join([
                            'Note the value_max is largest netCDF4 supports, ',
                            'but is lower than actual 64-bit int limit.'])}

    # Optional and standard metadata for xarray
    for var in data_keys:
        if var.find('variable_profiles') == 0:
            meta[var] = {'desc': 'Profiles with variable altitude.'}

            # len('variable_profiles') == 17; any suffix identifies an
            # additional time variable ('time' plus the same suffix)
            if len(var) > 17:
                tvar = 'time{:s}'.format(var[17:])
                meta[tvar] = {'desc': 'Additional time variable.'}

    # Standard metadata required for xarray.
    meta['profile_height'] = {'value_min': 0, 'value_max': 14, 'fill': -1,
                              'desc': 'Altitude of profile data.'}
    meta['variable_profile_height'] = {'long_name': 'Variable Profile Height'}

    # Standard metadata required for xarray.
    meta['images'] = {'desc': 'pixel value of image',
                      'notes': 'function of image_lat and image_lon'}
    meta['x'] = {'desc': 'x-value of image pixel',
                 'notes': 'Dummy Variable',
                 'value_min': 0, 'value_max': 7, 'fill': -1}
    meta['y'] = {'desc': 'y-value of image pixel',
                 'notes': 'Dummy Variable',
                 'value_min': 0, 'value_max': 7, 'fill': -1}
    meta['z'] = {'desc': 'z-value of profile height',
                 'notes': 'Dummy Variable',
                 'value_min': 0, 'value_max': 5, 'fill': -1}
    meta['image_lat'] = {'desc': 'Latitude of image pixel',
                         'notes': 'Dummy Variable',
                         'value_min': -90., 'value_max': 90.}
    meta['image_lon'] = {'desc': 'Longitude of image pixel',
                         'notes': 'Dummy Variable',
                         'value_min': 0., 'value_max': 360.}

    # Drop unused meta data for desired instrument.
    for var in meta.keys():
        if var not in data_keys:
            meta.drop(var)

    return meta
8✔
298

299

300
def list_files(tag='', inst_id='', data_path='', format_str=None,
               file_date_range=None, test_dates=None, mangle_file_dates=False,
               test_list_files_kwarg=None):
    """Produce a fake list of files spanning three years.

    Parameters
    ----------
    tag : str
        Tag name used to identify particular data set to be loaded.
        This input is nominally provided by pysat itself. (default='')
    inst_id : str
        Instrument ID used to identify particular data set to be loaded.
        This input is nominally provided by pysat itself. (default='')
    data_path : str
        Path to data directory. This input is nominally provided by pysat
        itself. (default='')
    format_str : str or NoneType
        File format string. This is passed from the user at pysat.Instrument
        instantiation, if provided. (default=None)
    file_date_range : pds.date_range
        File date range. The default mode generates a list of 3 years of daily
        files (1 year back, 2 years forward) based on the test_dates passed
        through below.  Otherwise, accepts a range of files specified by the
        user. (default=None)
    test_dates : dt.datetime or NoneType
        Pass the _test_date object through from the test instrument files
    mangle_file_dates : bool
        If True, file dates are shifted by 5 minutes. (default=False)
    test_list_files_kwarg : any
        Testing keyword (default=None)

    Returns
    -------
    Series of filenames indexed by file time

    """
    # Support keyword testing
    pysat.logger.info('test_list_files_kwarg = '
                      + str(test_list_files_kwarg))

    # Default to a three-year daily range (one year back, two years forward)
    # centered on the test date when no explicit range is supplied
    if file_date_range is None:
        first_date = test_dates[''][''] - pds.DateOffset(years=1)
        last_date = (test_dates[''][''] + pds.DateOffset(years=2)
                     - pds.DateOffset(days=1))
        file_date_range = pds.date_range(first_date, last_date)

    index = file_date_range

    # Shift all file times by five minutes when requested
    if mangle_file_dates:
        index = index + dt.timedelta(minutes=5)

    # Build one fake filename per date in the index
    names = [data_path + fdate.strftime('%Y-%m-%d') + '.nofile'
             for fdate in index]

    return pds.Series(names, index=index)
8✔
359

360

361
def list_remote_files(tag='', inst_id='', data_path='', format_str=None,
                      start=None, stop=None, test_dates=None, user=None,
                      password=None, mangle_file_dates=False,
                      test_list_remote_kwarg=None):
    """Produce a fake list of files to simulate new files on a remote server.

    Note
    ----
    List spans three years and one month.

    Parameters
    ----------
    tag : str
        Tag name used to identify particular data set.
        This input is nominally provided by pysat itself. (default='')
    inst_id : str
        Instrument ID used to identify particular data.
        This input is nominally provided by pysat itself. (default='')
    data_path : str
        Path to data directory. This input is nominally provided by pysat
        itself. (default='')
    format_str : str or NoneType
        file format string (default=None)
    start : dt.datetime or NoneType
        Starting time for file list. A None value will start 1 year before
        test_date
        (default=None)
    stop : dt.datetime or NoneType
        Ending time for the file list.  A None value will stop 2 years 1 month
        after test_date
        (default=None)
    test_dates : dt.datetime or NoneType
        Pass the _test_date object through from the test instrument files
    user : str or NoneType
        User string input used for download. Provided by user and passed via
        pysat. If an account is required for downloads this routine here must
        error if user not supplied. (default=None)
    password : str or NoneType
        Password for data download. (default=None)
    mangle_file_dates : bool
        If True, file dates are shifted by 5 minutes. (default=False)
    test_list_remote_kwarg : any
        Testing keyword (default=None)

    Returns
    -------
    pds.Series
        Filenames indexed by file time, see list_files for more info

    """
    # Support keyword testing
    pysat.logger.info('test_list_remote_kwarg = '
                      + str(test_list_remote_kwarg))

    # Fill in defaults: one year before the test date through two years
    # (less a day, plus a month) after it
    if start is None:
        start = test_dates[''][''] - pds.DateOffset(years=1)

    if stop is None:
        stop = (test_dates[''][''] + pds.DateOffset(years=2)
                - pds.DateOffset(days=1) + pds.DateOffset(months=1))

    # Delegate the actual file list construction to the local `list_files`
    return list_files(tag=tag, inst_id=inst_id, data_path=data_path,
                      format_str=format_str,
                      file_date_range=pds.date_range(start, stop),
                      mangle_file_dates=mangle_file_dates,
                      test_dates=test_dates)
430

431

432
def download(date_array, tag, inst_id, data_path='', user=None,
             password=None, test_download_kwarg=None):
    """Pass through when asked to download for a test instrument.

    Parameters
    ----------
    date_array : array-like
        list of datetimes to download data for. The sequence of dates need not
        be contiguous.
    tag : str
        Tag identifier used for particular dataset. This input is provided by
        pysat.
    inst_id : str
        Instrument ID string identifier used for particular dataset. This input
        is provided by pysat.
    data_path : str
        Path to directory to download data to. (default='')
    user : string or NoneType
        User string input used for download. Provided by user and passed via
        pysat. If an account is required for downloads this routine here must
        error if user not supplied. (default=None)
    password : string or NoneType
        Password for data download. (default=None)
    test_download_kwarg : any
        Testing keyword (default=None)

    Raises
    ------
    ValueError
        When user/password are required but not supplied

    Warnings
    --------
    When no download support will be provided

    Note
    ----
    This routine is invoked by pysat and is not intended for direct use by the
    end user.

    """
    # Support keyword testing
    pysat.logger.info('test_download_kwarg = ' + str(test_download_kwarg))

    if tag == 'no_download':
        warnings.warn('This simulates an instrument without download support')
    elif tag == 'user_password' and not user and not password:
        # Check that user name and password are passed through the unit tests.
        # Note that this line will be uncovered if test succeeds.
        raise ValueError('Tests are not passing user and password to test '
                         'instruments')

    return
8✔
489

490

491
def generate_fake_data(t0, num_array, period=5820, data_range=(0.0, 24.0),
                       cyclic=True):
    """Generate fake data over a given range.

    Parameters
    ----------
    t0 : float
        Start time in seconds
    num_array : array_like
        Array of time steps from t0.  This is the index of the fake data
    period : int
        The number of seconds per period. (default=5820)
    data_range : array_like
        For cyclic functions, the two-element (min, max) range of data values
        cycled over one period.  Not used for non-cyclic functions.
        (default=(0.0, 24.0))
    cyclic : bool
        If True, assume that fake data is a cyclic function (ie, longitude,
        slt) that will reset to data_range[0] once it reaches data_range[1].
        If False, continue to monotonically increase. (default=True)

    Returns
    -------
    data : array-like
        Array with fake data

    """
    if cyclic:
        # Map the phase of (t0 + num_array) within one period linearly onto
        # the requested data range
        uts_root = np.mod(t0, period)
        data = (np.mod(uts_root + num_array, period)
                * (np.diff(data_range)[0] / np.float64(period))) + data_range[0]
    else:
        # Monotonic count of completed periods since t0
        data = ((t0 + num_array) / period).astype(int)

    return data
8✔
528

529

530
def generate_times(fnames, num, freq='1s', start_time=None):
    """Construct list of times for simulated instruments.

    Parameters
    ----------
    fnames : list
        List of filenames.
    num : int
        Maximum number of times to generate.  Data points will not go beyond the
        current day.
    freq : str
        Frequency of temporal output, compatible with pandas.date_range
        (default='1s')
    start_time : dt.timedelta or NoneType
        Offset time of start time in fractional hours since midnight UT.
        If None, set to 0.
        (default=None)

    Returns
    -------
    uts : array
        Array of integers representing uts for a given day
    index : pds.DatetimeIndex
        The DatetimeIndex to be used in the pysat test instrument objects
    date : datetime
        The requested date reconstructed from the fake file name

    """
    # Deprecated: strings were once accepted for `num`
    if isinstance(num, str):
        warnings.warn('generate_times support for input strings interpreted '
                      + 'as the number of times has been deprecated. Please '
                      + 'switch to using integers.', DeprecationWarning)

    if start_time is not None and not isinstance(start_time, dt.timedelta):
        raise ValueError('start_time must be a dt.timedelta object')

    uts = []
    indices = []
    dates = []
    for day_num, fname in enumerate(fnames):
        # Recover the date from the file name, formatted as 'YYYY-MM-DD...'
        name_parts = os.path.split(fname)[-1].split('-')
        fdate = dt.datetime(int(name_parts[0]), int(name_parts[1]),
                            int(name_parts[2][0:2]))
        dates.append(fdate)

        # Build one day of times at the requested frequency, optionally
        # offset from midnight, truncated to at most `num` samples
        day_end = fdate + dt.timedelta(seconds=86399)
        day_start = fdate if start_time is None else fdate + start_time
        day_index = pds.date_range(start=day_start, end=day_end,
                                   freq=freq)[0:num]
        indices.extend(day_index)

        # Seconds of day, plus a whole-day offset per successive file
        uts.extend(day_index.hour * 3600 + day_index.minute * 60
                   + day_index.second + day_index.microsecond * 1e-6
                   + 86400. * day_num)

    # Combine the per-file indices into one DatetimeIndex and make UTS an array
    index = pds.DatetimeIndex(indices)
    uts = np.array(uts)

    return uts, index, dates
8✔
599

600

601
def define_period():
    """Define the default periods for the fake data functions.

    Returns
    -------
    def_period : dict
        Dictionary of periods to use in test instruments

    Note
    ----
    Local time and longitude slightly out of sync to simulate motion of Earth

    """
    # Periods in seconds: local time (97 min), longitude (104 min),
    # and angle (97 min)
    return {'lt': 5820,
            'lon': 6240,
            'angle': 5820}
8✔
620

621

622
def define_range():
    """Define the default ranges for the fake data functions.

    Returns
    -------
    def_range : dict
        Dictionary of periods to use in test instruments

    """
    # Cyclic value ranges: hours of local time, degrees of longitude,
    # and radians of angle
    return {'lt': [0.0, 24.0],
            'lon': [0.0, 360.0],
            'angle': [0.0, 2.0 * np.pi]}
8✔
637

638

639
def create_files(inst, start, stop, freq='1D', use_doy=True,
                 root_fname='pysat_testing_{year:04d}_{day:03d}.txt',
                 version=False, content=None, timeout=None):
    """Create a file set using the year and day of year.

    Parameters
    ----------
    inst : pysat.Instrument
        A test instrument, used to generate file path
    start : dt.datetime
        The date for the first file to create
    stop : dt.datetime
        The date for the last file to create
    freq : str
        Frequency of file output.  Codes correspond to pandas.date_range
        codes (default='1D')
    use_doy : bool
        If True use Day of Year (doy), if False use day of month and month.
        (default=True)
    root_fname : str
        The format of the file name to create. Supports standard pysat template
        variables 'year', 'month', 'day', 'hour', 'minute', 'second', 'version',
        'revision', 'cycle'. (default='pysat_testing_{year:04d}_{day:03d}.txt')
    version : bool
        If True, iterate over version / revision / cycle. If False,
        ignore version / revision / cycle. (default=False)
    content : str
        Custom text to write to temporary files (default=None)
    timeout : float
        Time is seconds to lock the files being created.  If None, no timeout is
        used.  (default=None)

    Examples
    --------
    ::

        # Commands below create empty files located at `inst.files.data_path`,
        # one per day, spanning 2008, where `year`, `month`, and `day`
        # are filled in using the provided template string appropriately.
        # The produced files are named like: 'pysat_testing_2008_01_01.txt'
        import datetime as dt
        inst = pysat.Instrument('pysat', 'testing')
        root_fname='pysat_testing_{year:04d}_{month:02d}_{day:02d}.txt'
        create_files(inst, dt.datetime(2008, 1, 1), dt.datetime(2008, 12, 31),
                     root_fname=root_fname, use_doy=False)


        # The command below uses the default values for `create_files`, which
        # produces a daily set of files, labeled by year and day of year.
        # The files are names like: 'pysat_testing_2008_001.txt'
        create_files(inst, dt.datetime(2008, 1, 1), dt.datetime(2008, 12, 31))

    """
    # Dates at which files will be created
    file_dates = putime.create_date_range(start, stop, freq=freq)

    # Define the version / revision / cycle values to iterate over
    if version:
        ver_vals = np.array([1, 2])
        rev_vals = np.array([0, 1])
        cyc_vals = np.array([0, 1])
    else:
        ver_vals = [None]
        rev_vals = [None]
        cyc_vals = [None]

    # Create one (possibly empty) file per date and version combination
    for fdate in file_dates:
        yr, doy = putime.getyrdoy(fdate)
        if not use_doy:
            # Use day of month instead of day of year in the template
            doy = fdate.day

        for ver in ver_vals:
            for rev in rev_vals:
                for cyc in cyc_vals:
                    fname = os.path.join(
                        inst.files.data_path,
                        root_fname.format(year=yr, day=doy, month=fdate.month,
                                          hour=fdate.hour, minute=fdate.minute,
                                          second=fdate.second, version=ver,
                                          revision=rev, cycle=cyc))

                    # Write under a file lock; optionally hold the lock open
                    with NetworkLock(fname, 'w') as fout:
                        if content is not None:
                            fout.write(content)
                        if timeout is not None:
                            time.sleep(timeout)
    return
8✔
730

731

732
def non_monotonic_index(index):
    """Adjust the index to be non-monotonic.

    Parameters
    ----------
    index : pds.DatetimeIndex
        The index generated in an instrument test file.

    Returns
    -------
    new_index : pds.DatetimeIndex
        A non-monotonic index

    """
    # Work on a mutable copy of the index values
    shuffled = index.tolist()

    # Swap two three-element runs to break monotonic ordering
    shuffled[6:9], shuffled[3:6] = shuffled[3:6], shuffled[6:9]

    # Convert back to DatetimeIndex
    return pds.to_datetime(shuffled)
8✔
756

757

758
def non_unique_index(index):
    """Adjust the index to be non-unique.

    Parameters
    ----------
    index : pds.DatetimeIndex
        The index generated in an instrument test file.

    Returns
    -------
    new_index : pds.DatetimeIndex
        A non-unique index

    """
    # Work on a mutable copy of the index values
    repeated = index.tolist()

    # Duplicate the value at position 1 into position 2 so the index
    # is no longer unique
    repeated[1:3] = [repeated[1]] * 2

    # Convert back to DatetimeIndex
    return pds.to_datetime(repeated)
8✔
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc