• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

spedas / pyspedas / 21973989521

13 Feb 2026 03:53AM UTC coverage: 90.293% (+0.05%) from 90.246%
21973989521

push

github

jameswilburlewis
Comment out tplot calls, add an assertion for mixed provisional/realtime test

1 of 1 new or added line in 1 file covered. (100.0%)

63 existing lines in 4 files now uncovered.

41962 of 46473 relevant lines covered (90.29%)

0.9 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

77.7
/pyspedas/tplot_tools/store_data.py
1
# Copyright 2020 Regents of the University of Colorado. All Rights Reserved.
2
# Released under the MIT license.
3
# This software was developed at the University of Colorado's Laboratory for Atmospheric and Space Physics.
4
# Verify current version before use at: https://github.com/MAVENSDC/PyTplot
5

6
import pandas as pd
1✔
7
import numpy as np
1✔
8
import datetime
1✔
9
import logging
1✔
10
from pyspedas.tplot_tools import del_data, tplot_rename, get_y_range, replace_metadata
1✔
11
import pyspedas
1✔
12
import xarray as xr
1✔
13
import copy
1✔
14
import warnings
1✔
15
from pyspedas import is_timezone_aware
1✔
16

17
tplot_num = 1
1✔
18

19

20
def store_data(name, data=None, delete=False, newname=None, attr_dict={}):
    # NOTE(review): attr_dict uses a mutable default ({}).  It is deep-copied
    # before being attached to the stored variable (see temp.attrs below), but
    # replace_metadata() receives it directly -- confirm replace_metadata does
    # not retain or mutate it before relying on the default.

    """
    Create a "Tplot Variable" (similar to the IDL SPEDAS concept) based on the inputs, and
    stores this data in memory.  Tplot Variables store all of the information
    needed to generate a plot.

    Parameters
    ----------
        name : str
            Name of the tplot variable that will be created
        data : dict or list[str]
            A python dictionary object for creating a single variable, or a list of base variables to combine them into a 'pseudovariable'

            'x' should be a 1-dimensional array that represents the data's x axis.  If x is a numeric type, it is interpreted
            as seconds since the Unix epoch.  x can also be passed as Pandas Series object, datetime.datetime, numpy.datetime64, or strings.
            represented in seconds since epoch (January 1st 1970)

            'y' should be the data values. This can be 2 dimensions if multiple lines or a spectrogram are desired.

            'v' is optional, and is only used for spectrogram plots.  This will be a list of bins to be used.  If this
            is provided, then 'y' should have dimensions of x by z.

            'v1/v2/v3/etc' are also optional, and are only used for to spectrogram plots.  These will act as the coordinates
            for 'y' if 'y' has numerous dimensions.  By default, 'v2' is plotted in spectrogram plots.

            'dy' is optional, and holds error values matching 'y' (stored under plot_options['error']).

        delete : bool, optional
            If True, deletes the tplot variable matching the "name" parameter
            Default: False
        newname: str
            If set, renames TVar to new name
            Default: None
        attr_dict: dict
            A dictionary object of attributes (these do not affect routines in pyspedas, this is merely to keep metadata alongside the file)
            Default: {} (empty dictionary)

    .. note::
        If you want to combine multiple tplot variables into one, simply supply the list of tplot variables to the
        "data" parameter.  This will cause the data to overlay when plotted.

    Returns
    -------
        bool
            True if successful, False otherwise.  (Some failure paths return None.)

    Examples
    --------
        >>> # Store a single line
        >>> import pyspedas
        >>> x_data = [1,2,3,4,5]
        >>> y_data = [1,2,3,4,5]
        >>> pyspedas.store_data("Variable1", data={'x':x_data, 'y':y_data})

        >>> # Store two lines
        >>> x_data = [1,2,3,4,5]
        >>> y_data = [[1,5],[2,4],[3,3],[4,2],[5,1]]
        >>> pyspedas.store_data("Variable2", data={'x':x_data, 'y':y_data})

        >>> # Store a spectrogram
        >>> x_data = [1,2,3]
        >>> y_data = [ [1,2,3] , [4,5,6], [7,8,9] ]
        >>> v_data = [1,2,3]
        >>> pyspedas.store_data("Variable3", data={'x':x_data, 'y':y_data, 'v':v_data})

        >>> # Combine two different line plots
        >>> pyspedas.store_data("Variable1and2", data=['Variable1', 'Variable2'])

        >>> #Rename TVar
        >>> pyspedas.store_data('a', data={'x':[0,4,8,12,16], 'y':[1,2,3,4,5]})
        >>> pyspedas.store_data('a',newname='f')

    """

    # global tplot_num
    # Timestamp recorded in plot_options['create_time'] for new variables.
    create_time = datetime.datetime.now()
    # If delete is specified, we are just deleting the variable
    if delete is True:
        del_data(name)
        return False

    if data is None and newname is None and attr_dict is None:
        logging.error('store_data: data array, newname, and attr_dict all unspecified, nothing to do.')
        return False

    # Metadata-only update: replace the attributes of an existing variable.
    if data is None and newname is None and attr_dict is not None:
        replace_metadata(name,attr_dict)
        return True

    # If newname is specified, we are just renaming the variable
    if newname is not None:
        tplot_rename(name, newname)
        return True

    # if isinstance(data, str):
    #     pyspedas.tplot_tools.data_quants[name] = {'name': name, 'data': data}
    #     return True
    # A space-separated string of variable names is treated like a list
    # (pseudovariable) below.
    if isinstance(data, str):
        data = data.split(' ')

    # If the data is a list instead of a dictionary, user is looking to overplot
    if isinstance(data, list):
        base_data = _get_base_tplot_vars(name,data)
        if len(base_data) == 0:
            logging.warning("store_data: None of the base variables exist to construct pseudovariable %s",name)
            return False
        # Copying the first variable to use all of its plot options
        # However, we probably want each overplot to retain its original plot option
        pyspedas.tplot_tools.data_quants[name] = copy.deepcopy(pyspedas.tplot_tools.data_quants[base_data[0]])
        pyspedas.tplot_tools.data_quants[name].attrs = copy.deepcopy(pyspedas.tplot_tools.data_quants[base_data[0]].attrs)
        pyspedas.tplot_tools.data_quants[name].name = name
        pyspedas.tplot_tools.data_quants[name].attrs['plot_options']['overplots'] = base_data[1:]
        pyspedas.tplot_tools.data_quants[name].attrs['plot_options']['overplots_mpl'] = base_data
        # These sets of options should default to the sub-variables' options, not simply
        # copied from the first variable in the list.   These options can be still be set
        # on the pseudovariable, and they will override the sub-variable options.
        pyspedas.tplot_tools.data_quants[name].attrs['plot_options']['yaxis_opt'] = {}
        pyspedas.tplot_tools.data_quants[name].attrs['plot_options']['zaxis_opt'] = {}
        pyspedas.tplot_tools.data_quants[name].attrs['plot_options']['line_opt'] = {}
        pyspedas.tplot_tools.data_quants[name].attrs['plot_options']['extras'] = {}
        return True

    # if the data table doesn't contain an 'x', assume this is a non-record varying variable
    # (stored as a plain dict rather than an xarray DataArray).
    if 'x' not in data.keys():
        values = np.array(data.pop('y'))
        pyspedas.tplot_tools.data_quants[name] = {'data': values}
        pyspedas.tplot_tools.data_quants[name]['name'] = name
        return True

    times = data.pop('x')

    # Suppress conversion warnings (e.g. ragged-array warnings) while
    # coercing y into a numpy array.
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        values = np.array(data.pop('y'))

    if 'dy' in data.keys():
        err_values = np.array(data.pop('dy'))

        # Mismatched error-array length is tolerated, but warned about.
        if len(err_values) != len(times):
            logging.warning('store_data: Warning: %s: length of error values (%d) does not match length of time values (%d)',name,len(err_values),
                            len(times))
    else:
        err_values = None

    # Convert input time representation to np.datetime64 objects, if needed
    if isinstance(times, pd.Series):
        datetimes = times.to_numpy(dtype='datetime64[ns]')  # if it is pandas series, convert to numpy array
    elif isinstance(times[0],datetime.datetime):
        # Timezone-naive datetime, do explicit conversion to np.datetime64[ns] and ensure container is a numpy array
        if is_timezone_aware(times):
            # Numpy will complain if it is given timezone-aware datetimes to convert.
            # So we convert to UTC first, then drop the timezone entirely
            tz_aware_utc = [aware_dt.astimezone(datetime.timezone.utc) for aware_dt in times]
            tz_naive = [aware_dt.replace(tzinfo=None) for aware_dt in tz_aware_utc]
            datetimes = np.array(tz_naive,dtype='datetime64[ns]')
        elif isinstance(times,np.ndarray):
            datetimes = times.astype('datetime64[ns]')
        else:
            datetimes = np.array(times,dtype='datetime64[ns]')
    elif isinstance(times[0],np.datetime64):
        # np.datetime64, use as-is, but we might have to convert the container to a numpy array
        if isinstance(times,np.ndarray):
            datetimes = times
        else:
            datetimes = np.array(times)
        # We want the np.datetime64 resolution to be ns.  If it already is, do nothing, otherwise,
        # convert to ns.  In the future, we might support storing times in any resolution,
        # and dealing with the conversion in get_data or in client code.
        dtype = datetimes.dtype
        if dtype.name != 'datetime64[ns]':
            datetimes = datetimes.astype('datetime64[ns]')
    elif isinstance(times[0],(int,np.integer,float,np.float64)):
        # Assume seconds since Unix epoch, convert to np.datetime64 with nanosecond precision
        # Make sure we have a numpy array
        if not isinstance(times,np.ndarray):
            times=np.array(times)
        # Replace any NaN or inf values with 0
        # NOTE(review): this mutates a caller-supplied ndarray in place.
        cond = np.logical_not(np.isfinite(times))
        times[cond] = 0
        datetimes = np.array(times*1e09,dtype='datetime64[ns]')
    elif isinstance(times[0],str):
        # Interpret strings as timestamps, convert to np.datetime64 with nanosecond precision
        datetimes = np.array(times,dtype='datetime64[ns]')
    else:
        # Hope it's convertable to a numpy array!  This case will get hit for an xarray DataArray.
        datetimes = np.array(times).astype('datetime64[ns]')

    times = datetimes

    # At this point, times should be a numpy array of datetime or np.datetime64 objects

    if len(values.shape) == 0:
        # This can happen for Cluster variables with only a single sample, as they can
        # be incorrectly marked as NRV and lose their leading (time) dimension.
        logging.warning("store_data: Data array for %s appears to be a zero-dimensional array; converting to 1-D array.",name)
        if len(times) == 1:
            logging.warning("store_data: This is possibly due to the leading array dimension being lost in a scalar variable with a single timestamp.")
        values = np.array([values])

    if len(values) == 0:
        logging.warning('store_data: %s has empty y component, cannot create variable',name)
        return False

    if len(times) != len(values):
        # This happens for a few MMS and other data sets. Rather than quitting immediately, go ahead and create
        # the variable, but give an informational message about the mismatch.  The fix would probably be for the
        # data provider to mark the variable as non-record-variant, and avoid giving it a DEPEND_0 or DEPEND_TIME
        # attribute.
        logging.info("store_data: %s: lengths of x (%d) and y (%d) do not match! Mislabeled NRV variable?",name,len(times),len(values))

    if not isinstance(times,np.ndarray):
        logging.warning("store_data: times was not converted to a numpy array. This should not happen.")
        times = np.array(times)

    # Compute the time range (seconds since epoch) for plot_options['trange'].
    # assumes monotonically increasing time series
    if isinstance(times[0], datetime.datetime):
        # This may be dead code now?
        trange = [times[0].replace(tzinfo=datetime.timezone.utc).timestamp(),
                  times[-1].replace(tzinfo=datetime.timezone.utc).timestamp()]
    elif isinstance(times[0], np.datetime64):
        trange = np.float64([times[0], times[-1]]) / 1e9
    else:
        trange = [times[0], times[-1]]

    # Special case if y is 1-dimensional and 'v' or 'v1' is present
    # This can happen if split_data is called on a vector-valued variable that has a DEPEND_1.
    # We can't use v as a coordinate, or we'll get a ValueError creating the xarray object,
    # so we'll save its value here, then after the xarray object is created, stash it in a different
    # attribute.  Then join_vec can find it and restore the depend_1 array from split-out components.
    extra_v_values = None
    if len(values.shape) == 1:
        if 'v' in data.keys():
            extra_v_values = np.array(data.pop('v'))
        elif 'v1' in data.keys():
            extra_v_values = np.array(data.pop('v1'))

    # Figure out the 'v' data
    # This seems to be conflating specplot bins with general DEPEND_N attributes.
    # Maybe only do this stuff if it's marked as a spectrum?  But what if it's from
    # a NetCDF rather than a CDF?
    spec_bins_exist = False
    if 'v' in data or 'v1' in data or 'v2' in data or 'v3' in data:
        # Generally the data is 1D, but occasionally
        # the bins will vary in time.
        spec_bins_exist = True
        if 'v' in data:
            spec_bins = data['v']
            spec_bins_dimension = 'v'
        elif ("v1" in data) and ("v2" in data) and ("v3" in data):
            # For multi-dimensional data, 'v2' is used as the spectrogram bins.
            spec_bins = data['v2']
            spec_bins_dimension = 'v2'
        elif ("v1" in data) and ("v2" in data):
            spec_bins = data['v2']
            spec_bins_dimension = 'v2'
        else:
            # At least one vn is missing.
            logging.warning("At least one Vn tag is missing, cannot create spec_bins from variable %s.", name)
            spec_bins_exist = False

        if spec_bins_exist and type(spec_bins) is not pd.DataFrame:
            try:
                spec_bins = pd.DataFrame(spec_bins)
            except:
                # NOTE(review): bare except -- any failure falls back to
                # synthetic 1..N bin numbers sized from the y array.
                if spec_bins_dimension=='v':
                    spec_bins = np.arange(1, len(values[0])+1)
                elif spec_bins_dimension=="v2":
                    spec_bins = np.arange(1, len(values[0][0]) + 1)
                elif spec_bins_dimension=="v3":
                    spec_bins = np.arange(1, len(values[0][0][0]) + 1)
                spec_bins = pd.DataFrame(spec_bins)


        if spec_bins_exist and len(spec_bins.columns) != 1:
            # The spec_bins are time varying
            # Or maybe they're just DEPEND_N and nothing to do with spectra?
            spec_bins_time_varying = True
            if len(spec_bins) != len(times):
                # Maybe it's not a spectrum at all?
                # Cluster pressure tensor variablea havw a DEPEND_1 that's 2-D, 1x3 [['x','y','z']]
                logging.error("store_data: Length of spec_bins (%d) and times (%d) do not match for variable %s.",len(spec_bins),len(times),name)
                spec_bins = None
                spec_bins_exist = False
        elif spec_bins_exist:
            spec_bins = spec_bins.transpose()
            spec_bins_time_varying = False
    else:
        spec_bins = None
        # Provide another dimension if values are more than 1 dimension
        if len(values.shape) == 2:
            data['v'] = None
        if len(values.shape) > 2:
            data['v1'] = None
            data['v2'] = None
        if len(values.shape) > 3:
            data['v3'] = None

    # Set up xarray dimension and coordinates.  Coordinate keys are sorted so
    # dimension order is deterministic (v, v1, v2, v3, ...).
    coordinate_list = sorted(list(data.keys()))
    dimension_list = [d + '_dim' for d in coordinate_list]

    if len(coordinate_list) < len(values.shape)-1:
        # Fewer v_n keys than data dimensions: pad out with empty (None) keys
        # so the xarray dims line up with the data shape.
        logging.warning("store_data: Data array for variable %s has %d dimensions, but only %d v_n keys plus time. Adding empty v_n keys.", name, len(values.shape), len(coordinate_list))
        if len(values.shape) == 2:
            data['v'] = None
        elif len(values.shape) == 3:
            # A lone 'v' key is renamed to 'v1' for 3-D data.
            if 'v' in data:
                vdat = data.pop('v')
                data['v1'] = vdat
            elif 'v1' in data:
                pass
            if 'v1' not in data:
                data['v1'] = None
            if 'v2' not in data:
                data['v2'] = None
        elif len(values.shape) == 4:
            # ERG LEPI 3dflux quality flags have this issue
            if 'v' in data:
                vdat = data.pop('v')
                data['v1'] = vdat
            elif 'v1' in data:
                pass
            if 'v1' not in data:
                data['v1'] = None
            if 'v2' not in data:
                data['v2'] = None
            if 'v3' not in data:
                data['v3'] = None

        coordinate_list = sorted(list(data.keys()))
        dimension_list = [d + '_dim' for d in coordinate_list]
        # Don't try to use these dimensions as coordinates
        spec_bins_exist = False
        spec_bins = None

    temp = None
    # Ignore warnings about cdflib non-nanosecond precision timestamps for now
    with warnings.catch_warnings():
        warnings.filterwarnings("ignore",message="^.*non-nanosecond precision.*$")
        try:
            temp = xr.DataArray(values, dims=['time']+dimension_list,
                                coords={'time': ('time', times)})
        except ValueError as err:
            logging.warning("store_data: ValueError trying to set xarray coordinates for variable %s: %s", name, str(err))
            spec_bins_exist = False
            spec_bins = None
            if len(times) == 1:
                logging.warning("store_data: This is possibly due to the leading data dimension being lost in an array-valued or vector-valued variable with a single timestamp.")
            # If data is 1-dimensional, ignore any DEPEND_N supplied
            elif (len(values.shape) == 1) and len(dimension_list) > 0:
                logging.warning("store_data: variable %s is 1-dimensional, but has additional keys defined: %s.  Dropping redundant coordinate(s).",name, dimension_list)
                temp = xr.DataArray(values, dims=['time'], coords={'time': ('time', times)})
                coordinate_list=[]
                dimension_list=[]
            else:
                # NOTE(review): returns None here rather than False.
                logging.warning("Giving up on this variable.")
                return

    if temp is None:
        # This can happen with mismatched times/data values, and no valid DEPEND_N.
        # For example, POLAR MFE data, variable MF_Num
        logging.warning("store_data: Unable to create xarray object for variable %s, giving up.", name)
        return

    if spec_bins_exist:
        try:
            if spec_bins_time_varying:
                temp.coords['spec_bins'] = (('time', spec_bins_dimension+'_dim'), spec_bins.values)
            else:
                temp.coords['spec_bins'] = (spec_bins_dimension+'_dim', np.squeeze(spec_bins.values))
        except ValueError as err:
            logging.warning('store_data: conflicting size for at least one dimension for variable %s', name)
            logging.warning('store_data: ValueError exception text: %s',str(err))

    # Attach each remaining v_n key as an xarray coordinate (time-varying if
    # 2-D with matching length, otherwise a plain 1-D coordinate).
    for d in coordinate_list:
        if data[d] is None:
            continue
        try:
            d_dimension = pd.DataFrame(data[d])
            if len(d_dimension.columns) != 1:
                if len(d_dimension) != len(times):
                    logging.warning("store_data: Length of %s (%d) and time (%d) do not match.  Cannot create coordinate for %s.",d,len(d_dimension),len(times),name)
                    continue
                temp.coords[d] = (('time', d+'_dim'), d_dimension.values)
            else:
                d_dimension = d_dimension.transpose()
                squeezed_array = np.squeeze(d_dimension.values)# np.squeeze() does something funny here if this dimension has length 1, causing a ValueError exception
                if d_dimension.size == 1:
                    logging.warning("store_data: Dimension %s of variable %s has length 1",d,name)
                    temp.coords[d] = (d+'_dim', d_dimension.values[0])
                else:
                    temp.coords[d] = (d+'_dim', squeezed_array)
        except ValueError as err:
            logging.warning("store_data: Could not create coordinate %s_dim for variable %s",d, name)
            logging.warning("store_data: ValueError exception text: %s", str(err))

    # Set up Attributes Dictionaries
    xaxis_opt = dict(axis_label='')
    # For spectrograms the variable name labels the z (color) axis instead of y.
    yaxis_opt = dict(axis_label=name) if (spec_bins is None) else dict(axis_label='')
    zaxis_opt = dict(axis_label='Z-Axis') if (spec_bins is None) else dict(axis_label=name)
    xaxis_opt['crosshair'] = 'X'
    yaxis_opt['crosshair'] = 'Y'
    zaxis_opt['crosshair'] = 'Z'
    xaxis_opt['x_axis_type'] = 'linear'
    yaxis_opt['y_axis_type'] = 'linear'
    zaxis_opt['z_axis_type'] = 'linear'
    line_opt = {}
    time_bar = []
    extras = dict(panel_size=1, border=True)
    links = {}

    # Add dicts to the xarray attrs
    temp.name = name
    temp.attrs = copy.deepcopy(attr_dict)
    if extra_v_values is not None:
        temp.attrs['extra_v_values'] = extra_v_values

    # Only initialize plot options if the caller's attr_dict didn't already
    # carry a 'plot_options' entry (e.g. copied from another variable).
    if 'plot_options' not in temp.attrs.keys():
        temp.attrs['plot_options'] = {}
        temp.attrs['plot_options']['xaxis_opt'] = xaxis_opt
        temp.attrs['plot_options']['yaxis_opt'] = yaxis_opt
        temp.attrs['plot_options']['zaxis_opt'] = zaxis_opt
        temp.attrs['plot_options']['line_opt'] = line_opt
        temp.attrs['plot_options']['trange'] = trange
        temp.attrs['plot_options']['time_bar'] = time_bar
        temp.attrs['plot_options']['extras'] = extras
        temp.attrs['plot_options']['create_time'] = create_time
        temp.attrs['plot_options']['links'] = links
        #temp.attrs['plot_options']['spec_bins_ascending'] = _check_spec_bins_ordering(times, spec_bins)
        temp.attrs['plot_options']['overplots'] = []
        temp.attrs['plot_options']['overplots_mpl'] = []
        temp.attrs['plot_options']['interactive_xaxis_opt'] = {}
        temp.attrs['plot_options']['interactive_yaxis_opt'] = {}
        temp.attrs['plot_options']['error'] = err_values

    pyspedas.tplot_tools.data_quants[name] = temp

    pyspedas.tplot_tools.data_quants[name].attrs['plot_options']['yaxis_opt']['y_range'] = get_y_range(temp)

    return True
458

459

460
def _get_base_tplot_vars(name, data):
    """Recursively resolve the list of base (non-pseudo) tplot variable names.

    Components that are themselves pseudovariables (their .data is a list of
    names) are expanded; components missing from data_quants are skipped with
    a warning.  Returns a flat list of base variable names.
    """
    if not isinstance(data, list):
        data = [data]
    resolved = []
    for component in data:
        if component not in pyspedas.tplot_tools.data_quants:
            logging.warning('store_data: Pseudovariable %s component %s not found, skipping', name, component)
            continue
        stored = pyspedas.tplot_tools.data_quants[component].data
        if isinstance(stored, list):
            # This component is itself a pseudovariable; expand it recursively.
            resolved.extend(_get_base_tplot_vars(name, stored))
        else:
            resolved.append(component)
    return resolved
472

473

474
def _check_spec_bins_ordering(times, spec_bins):
1✔
475
    """
476
    This is a private function, this is run during
477
    object creation to check if spec_bins are ascending or descending
478
    """
479
    if spec_bins is None:
×
480
        return
×
UNCOV
481
    if len(spec_bins) == len(times):
×
482
        break_top_loop = False
×
483
        for index, row in spec_bins.iterrows():
×
484
            if row.isnull().values.all():
×
485
                continue
×
486
            else:
UNCOV
487
                for i in row.index:
×
488
                    if np.isfinite(row[i]) and np.isfinite(row[i + 1]):
×
489
                        ascending = row[i] < row[i + 1]
×
490
                        break_top_loop = True
×
UNCOV
491
                        break
×
492
                    else:
493
                        continue
×
UNCOV
494
                if break_top_loop:
×
UNCOV
495
                    break
×
496
    else:
UNCOV
497
        ascending = spec_bins[0].iloc[0] < spec_bins[1].iloc[0]
×
UNCOV
498
    return ascending
×
499

500

501
def store(name, data=None, delete=False, newname=None, metadata=None):
    """
    Create tplot variables. This is a wrapper for store_data, with the only apparent
    difference being that 'attr_dict' in store_data is replaced with 'metadata' in store().
    This wrapper will likely be removed in a future release.

    Parameters
    ----------
        name : str
            Name of the tplot variable that will be created
        data : dict or list[str]
            A python dictionary object.

            'x' should be a 1-dimensional array that represents the data's x axis.  Typically this data is time,
            represented in seconds since epoch (January 1st 1970)

            'y' should be the data values. This can be 2 dimensions if multiple lines or a spectrogram are desired.

            'v' is optional, and is only used for spectrogram plots.  This will be a list of bins to be used.  If this
            is provided, then 'y' should have dimensions of x by z.

            'v1/v2/v3/etc' are also optional, and are only used for to spectrogram plots.  These will act as the coordinates
            for 'y' if 'y' has numerous dimensions.  By default, 'v2' is plotted in spectrogram plots.

            'x' and 'y' can be any data format that can be read in by the pandas module.  Python lists, numpy arrays,
            or any pandas data type will all work.
        delete : bool, optional
            Deletes the tplot variable matching the "name" parameter
        newname: str
            Renames TVar to new name
        metadata: dict
            A dictionary object of attributes (these do not affect routines in pyspedas, this is merely to keep metadata alongside the file)
            Default: {} (empty dictionary)

    .. note::
        If you want to combine multiple tplot variables into one, simply supply the list of tplot variables to the
        "data" parameter.  This will cause the data to overlay when plotted.

    Returns
    -------
        bool
            The return value of store_data: True if successful, False otherwise

    Examples
    --------
        >>> # Store a single line
        >>> import pyspedas
        >>> x_data = [1,2,3,4,5]
        >>> y_data = [1,2,3,4,5]
        >>> pyspedas.store("Variable1", data={'x':x_data, 'y':y_data})

        >>> # Store two lines
        >>> x_data = [1,2,3,4,5]
        >>> y_data = [[1,5],[2,4],[3,3],[4,2],[5,1]]
        >>> pyspedas.store("Variable2", data={'x':x_data, 'y':y_data})

        >>> # Store a spectrogram
        >>> x_data = [1,2,3]
        >>> y_data = [ [1,2,3] , [4,5,6], [7,8,9] ]
        >>> v_data = [1,2,3]
        >>> pyspedas.store("Variable3", data={'x':x_data, 'y':y_data, 'v':v_data})

        >>> # Combine two different line plots
        >>> pyspedas.store("Variable1and2", data=['Variable1', 'Variable2'])

        >>> #Rename TVar
        >>> pyspedas.store('a', data={'x':[0,4,8,12,16], 'y':[1,2,3,4,5]})
        >>> pyspedas.store('a',newname='f')
    """
    # Use None instead of a shared mutable {} default (classic Python pitfall),
    # then normalize to a fresh empty dict so store_data sees the same value it
    # always did when metadata was omitted.
    if metadata is None:
        metadata = {}
    return store_data(name, data=data, delete=delete, newname=newname, attr_dict=metadata)
565

STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc