desihub / desispec / build 4421471002 (pending completion)

Pull Request #2015: desi_proc_tilenight processes skysub when --laststeps=skysub
github-actions (GitHub): Merge e0651b2a9 into cc81c8f76

13 of 13 new or added lines in 4 files covered. (100.0%)

10648 of 43713 relevant lines covered (24.36%)

0.24 hits per line

Source File
/py/desispec/scripts/submit_night.py (11.55% covered)
"""
desispec.scripts.submit_night
=============================

"""
from desiutil.log import get_logger
import numpy as np
import os
import sys
import time
import re
from astropy.table import Table, vstack
## Import some helper functions; you can see their definitions by uncommenting the bash shell command
from desispec.workflow.tableio import load_tables, write_table
from desispec.workflow.utils import pathjoin, sleep_and_report
from desispec.workflow.timing import what_night_is_it
from desispec.workflow.exptable import get_exposure_table_path, \
    get_exposure_table_name, get_last_step_options
from desispec.workflow.proctable import default_obstypes_for_proctable, get_processing_table_path, \
                                        get_processing_table_name, erow_to_prow, table_row_to_dict, \
                                        default_prow
from desispec.workflow.procfuncs import parse_previous_tables, get_type_and_tile, \
                                        define_and_assign_dependency, create_and_submit, \
                                        checkfor_and_submit_joint_job, submit_tilenight_and_redshifts
from desispec.workflow.queue import update_from_queue, any_jobs_not_complete
from desispec.workflow.desi_proc_funcs import get_desi_proc_batch_file_path
from desispec.workflow.redshifts import read_minimal_exptables_columns
from desispec.io.util import decode_camword, difference_camwords, create_camword
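## This module drives the nightly batch submission: submit_night() builds the night's
## processing table from its exposure table and submits the corresponding jobs, and
## get_completed_tiles() selects the survey tiles considered complete.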

def submit_night(night, proc_obstypes=None, z_submit_types=None, queue='realtime',
                 reservation=None, system_name=None,
                 exp_table_path=None, proc_table_path=None, tab_filetype='csv',
                 dry_run_level=0, dry_run=False, no_redshifts=False, error_if_not_available=True,
                 append_to_proc_table=False, ignore_proc_table_failures = False,
                 dont_check_job_outputs=False, dont_resubmit_partial_jobs=False,
                 tiles=None, surveys=None, laststeps=None, use_tilenight=False,
                 all_tiles=False, specstatus_path=None, use_specter=False,
                 do_cte_flat=False, complete_tiles_thrunight=None):
    """
    Creates a processing table and an unprocessed table from a fully populated exposure table and submits those
    jobs for processing (unless dry_run is set).

    Args:
        night (int): The night of data to be processed. Exposure table must exist.
        proc_obstypes (list or np.array, optional): A list of exposure OBSTYPE's that should be processed (and therefore
            added to the processing table).
        z_submit_types (list of str, or comma-separated str, optional): The "group" types of redshifts that should be
            submitted with each exposure. If not specified, the default for daily processing is
            ['cumulative', 'pernight-v0']. If false, 'false', or [], then no redshifts are submitted.
        queue (str, optional): The name of the queue to submit the jobs to. Default is "realtime".
        reservation (str, optional): The reservation to submit jobs to. If None, it is not submitted to a reservation.
        system_name (str): Batch system name, e.g. cori-haswell, cori-knl, perlmutter-gpu.
        exp_table_path (str): Full path to where the exposure tables are stored, WITHOUT the monthly directory included.
        proc_table_path (str): Full path to where the processing tables are to be written.
        tab_filetype (str, optional): The file extension (without the '.') of the exposure and processing tables.
        dry_run_level (int, optional): If nonzero, this is a simulated run. If dry_run_level=1, the scripts will be written but not submitted.
            If dry_run_level=2, the scripts will be neither written nor submitted. Logging will remain the same
            for testing as though scripts are being submitted. Default is 0 (false).
        dry_run (bool, optional): Whether to run without submitting scripts. If dry_run_level is nonzero, it overrides
            this flag. If dry_run_level is not set and dry_run=True, dry_run_level is set to 2 (no scripts
            generated or run). Default for dry_run is False.
        no_redshifts (bool, optional): Whether to submit redshifts or not. If True, redshifts are not submitted.
        error_if_not_available (bool, optional): Default is True. Raise an error if the required exposure table doesn't exist,
            otherwise print an error and return.
        append_to_proc_table (bool, optional): True if you want to submit jobs even if a processing table already exists,
            in which case new jobs are appended to it. Default is False.
        ignore_proc_table_failures (bool, optional): True if you want to submit other jobs even if the loaded
            processing table has incomplete jobs in it. Use with caution. Default is False.
        dont_check_job_outputs (bool, optional): Default is False. If False, the code checks for the existence of the expected final
            data products for the script being submitted. If all files exist and this is False,
            then the script will not be submitted. If some files exist and this is False, only the
            subset of the cameras without the final data products will be generated and submitted.
        dont_resubmit_partial_jobs (bool, optional): Default is False. Must be used with dont_check_job_outputs=False. If this flag is
            False, jobs with some prior data are pruned using PROCCAMWORD to only process the
            remaining cameras not found to exist.
        tiles (array-like, optional): Only submit jobs for these TILEIDs.
        surveys (array-like, optional): Only submit science jobs for these surveys (lowercase).
        laststeps (array-like, optional): Only submit jobs for exposures with LASTSTEP in these laststeps (lowercase).
        use_tilenight (bool, optional): Default is False. If True, use desi_proc_tilenight for the prestdstar, stdstar,
            and poststdstar steps for science exposures.
        all_tiles (bool, optional): Default is False. Set to True to NOT restrict to completed tiles as defined by
            the table pointed to by specstatus_path.
        specstatus_path (str, optional): Default is $DESI_SURVEYOPS/ops/tiles-specstatus.ecsv.
            Location of the surveyops specstatus table.
        use_specter (bool, optional): Default is False. If True, use specter, otherwise use gpu_specter by default.
        do_cte_flat (bool, optional): Default is False. If True, one-second flat exposures are processed for CTE identification.
        complete_tiles_thrunight (int, optional): Default is None. Only tiles completed
            on or before the supplied YYYYMMDD are considered
            completed and will be processed. All complete
            tiles are submitted if None or all_tiles is True.
    """
    log = get_logger()

    ## Recast booleans from double negative
    check_for_outputs = (not dont_check_job_outputs)
    resubmit_partial_complete = (not dont_resubmit_partial_jobs)

    if proc_obstypes is None:
        proc_obstypes = default_obstypes_for_proctable()
    print(f"Processing the following obstypes: {proc_obstypes}")

    ## Determine where the exposure table will be written
    if exp_table_path is None:
        exp_table_path = get_exposure_table_path(night=night, usespecprod=True)
    name = get_exposure_table_name(night=night, extension=tab_filetype)
    exp_table_pathname = pathjoin(exp_table_path, name)
    if not os.path.exists(exp_table_pathname):
        if error_if_not_available:
            raise IOError(f"Exposure table: {exp_table_pathname} not found. Exiting this night.")
        else:
            print(f"ERROR: Exposure table: {exp_table_pathname} not found. Exiting this night.")
            return

    ## Determine where the processing table will be written
    if proc_table_path is None:
        proc_table_path = get_processing_table_path()
    os.makedirs(proc_table_path, exist_ok=True)
    name = get_processing_table_name(prodmod=night, extension=tab_filetype)
    proc_table_pathname = pathjoin(proc_table_path, name)

    ## Define the group types of redshifts you want to generate for each tile
    if no_redshifts:
        z_submit_types = None
    else:
        if z_submit_types is None:
            pass
        elif isinstance(z_submit_types, str):
            if z_submit_types.lower() == 'false':
                z_submit_types = None
            elif z_submit_types.lower() == 'none':
                z_submit_types = None
            else:
                z_submit_types = [ztype.strip().lower() for ztype in z_submit_types.split(',')]
                for ztype in z_submit_types:
                    if ztype not in ['cumulative', 'pernight-v0', 'pernight', 'perexp']:
                        raise ValueError(f"Couldn't understand ztype={ztype} in z_submit_types={z_submit_types}.")
        else:
            raise ValueError(f"Couldn't understand z_submit_types={z_submit_types}, type={type(z_submit_types)}.")

    if z_submit_types is None:
        print("Not submitting scripts for redshift fitting")
    else:
        print(f"Redshift fitting with redshift group types: {z_submit_types}")

    ## Reconcile the dry_run and dry_run_level
    if dry_run and dry_run_level == 0:
        dry_run_level = 2
    elif dry_run_level > 0:
        dry_run = True

    ## If laststeps not defined, default is only LASTSTEP=='all' exposures for non-tilenight runs
    tilenight_laststeps = laststeps
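    ## Keep the caller's original (possibly None) laststeps for the tilenight path below,
    ## so tilenight jobs can also cover exposures whose LASTSTEP is e.g. 'skysub' while
    ## the non-tilenight default remains LASTSTEP=='all' only.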
    if laststeps is None:
        laststeps = ['all',]
    else:
        laststep_options = get_last_step_options()
        for laststep in laststeps:
            if laststep not in laststep_options:
                raise ValueError(f"Couldn't understand laststep={laststep} in laststeps={laststeps}.")
    print(f"Processing exposures with the following LASTSTEP's: {laststeps}")

    ## Check if night has already been submitted and don't submit if it has, unless told to with --append-to-proc-table
    if os.path.exists(proc_table_pathname):
        if not append_to_proc_table:
            print(f"ERROR: Processing table: {proc_table_pathname} already exists and not "+
                  "given flag --append-to-proc-table. Exiting this night.")
            return

    ## Determine where the unprocessed data table will be written
    unproc_table_pathname = pathjoin(proc_table_path, name.replace('processing', 'unprocessed'))

    ## Combine the table names and types for easier passing to io functions
    table_pathnames = [exp_table_pathname, proc_table_pathname]
    table_types = ['exptable', 'proctable']

    ## Load in the files defined above
    etable, ptable = load_tables(tablenames=table_pathnames, tabletypes=table_types)
    full_etable = etable.copy()

    ## Sort science exposures by TILEID
    sciexps = (etable['OBSTYPE']=='science')
    scisrtd = etable[sciexps].argsort(['TILEID','EXPID'])
    etable[sciexps] = etable[sciexps][scisrtd]

    ## filter by TILEID if requested
    if tiles is not None:
        log.info(f'Filtering by tiles={tiles}')
        if etable is not None:
            keep = np.isin(etable['TILEID'], tiles)
            etable = etable[keep]
        #if ptable is not None:
        #    keep = np.isin(ptable['TILEID'], tiles)
        #    ptable = ptable[keep]

    if surveys is not None:
        log.info(f'Filtering by surveys={surveys}')
        if etable is not None:
            if 'SURVEY' not in etable.dtype.names:
                raise ValueError(f'surveys={surveys} filter requested, but no SURVEY column in {exp_table_pathname}')

            # only apply survey filter to OBSTYPE=science exposures, i.e. auto-keep non-science
            keep = (etable['OBSTYPE'] != 'science')

            # np.isin doesn't work with bytes vs. str from Tables but direct comparison does, so loop
            for survey in surveys:
                keep |= etable['SURVEY'] == survey

            etable = etable[keep]
        #if ptable is not None:
        #    # ptable doesn't have "SURVEY", so filter by the TILEIDs we just kept
        #    keep = np.isin(ptable['TILEID'], etable['TILEID'])
        #    ptable = ptable[keep]

    ## If asked to do so, only process tiles deemed complete by the specstatus file
    if not all_tiles:
        all_completed_tiles = get_completed_tiles(specstatus_path,
                                              complete_tiles_thrunight=complete_tiles_thrunight)

        ## Add -99 to keep calibration exposures
        all_completed_tiles_withcalib = np.append([-99], all_completed_tiles)
        if etable is not None:
            keep = np.isin(etable['TILEID'], all_completed_tiles_withcalib)
            sciselect = np.isin(etable['TILEID'], all_completed_tiles)
            completed_tiles = np.unique(etable['TILEID'][keep])
            sci_tiles = np.unique(etable['TILEID'][sciselect])
            log.info(f"Processing completed science tiles: {', '.join(sci_tiles.astype(str))}")
            log.info(f"Filtering by completed tiles retained {len(sci_tiles)}/{sum(np.unique(etable['TILEID'])>0)} science tiles")
            log.info(f"Filtering by completed tiles retained {sum(sciselect)}/{sum(etable['TILEID']>0)} science exposures")
            etable = etable[keep]

    ## Cut on LASTSTEP
    good_exps = np.isin(np.array(etable['LASTSTEP']).astype(str), laststeps)
    etable = etable[good_exps]

    ## For cumulative redshifts, identify tiles for which this is the last night that they were observed
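    ## (checked against the exposure tables of all nights via read_minimal_exptables_columns;
    ##  a cumulative redshift is only submitted on a tile's final observed night)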
    tiles_cumulative = list()
    if z_submit_types is not None and 'cumulative' in z_submit_types:
        tiles_this_night = np.unique(np.asarray(etable['TILEID']))
        tiles_this_night = tiles_this_night[tiles_this_night>0]  # science tiles, not calibs
        allexp = read_minimal_exptables_columns(tileids=tiles_this_night)
        for tileid in tiles_this_night:
            nights_with_tile = allexp['NIGHT'][allexp['TILEID'] == tileid]
            if len(nights_with_tile) > 0 and night == np.max(nights_with_tile):
                tiles_cumulative.append(tileid)

        log.info(f'Submitting cumulative redshifts for {len(tiles_cumulative)}/{len(tiles_this_night)} tiles '
                 f'for which {night} is the last night: {tiles_cumulative}')

    ## Count zeros before trimming by OBSTYPE since they are used for
    ## nightly bias even if they aren't processed individually
    num_zeros = np.sum([erow['OBSTYPE'] == 'zero' and
                       (erow['PROGRAM'].startswith('calib zeros') or erow['PROGRAM'].startswith('zeros for dark'))
                       for erow in etable])

    ## Cut on OBSTYPES
    good_types = np.isin(np.array(etable['OBSTYPE']).astype(str), proc_obstypes)
    etable = etable[good_types]

    ## Cut on EXPTIME
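    ## Keep science exposures of at least 60s, arcs of at most 8s, darks within 1s of 300s,
    ## and flats within 1s of 120s; with do_cte_flat, additionally keep the first ~1s flat
    ## for CTE characterization.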
    good_exptimes = []
    already_found_cte_flat = False
    for erow in etable:
        if erow['OBSTYPE'] == 'science' and erow['EXPTIME'] < 60:
            good_exptimes.append(False)
        elif erow['OBSTYPE'] == 'arc' and erow['EXPTIME'] > 8.:
            good_exptimes.append(False)
        elif erow['OBSTYPE'] == 'dark' and np.abs(float(erow['EXPTIME']) - 300.) > 1:
            good_exptimes.append(False)
        elif erow['OBSTYPE'] == 'flat' and np.abs(float(erow['EXPTIME']) - 120.) > 1:
            if do_cte_flat and not already_found_cte_flat \
               and np.abs(float(erow['EXPTIME']) - 1.) < 0.5:
                good_exptimes.append(True)
                already_found_cte_flat = True
            else:
                good_exptimes.append(False)
        else:
            good_exptimes.append(True)
    etable = etable[np.array(good_exptimes)]

    ## Simple table organization to ensure cals processed first
    ## To be eventually replaced by more sophisticated cal selection
    ## Get one dark first
    isdarkcal = np.array([(erow['OBSTYPE'] == 'dark' and 'calib' in
                          erow['PROGRAM']) for erow in etable])
    isdark = np.array([(erow['OBSTYPE'] == 'dark') for erow in etable])

    ## If a cal, want to select that but ignore all other darks
    ## elif only a dark sequence, use that
    if np.sum(isdarkcal)>0:
        wheredark = np.where(isdarkcal)[0]
        ## note this is ~isdark because we want to get rid of all other darks
        etable = vstack([etable[wheredark[0]], etable[~isdark]])
    elif np.sum(isdark)>0:
        wheredark = np.where(isdark)[0]
        etable = vstack([etable[wheredark[0]], etable[~isdark]])

    ## Then get rest of the cals above scis
    issci = (etable['OBSTYPE'] == 'science')
    etable = vstack([etable[~issci], etable[issci]])

    ## Get relevant data from the tables
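    ## parse_previous_tables returns the lists of arc, flat, and science prows not yet
    ## folded into joint jobs, the dict of calibration jobs, the obstype/tile of the most
    ## recent exposure, and the next free internal id.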
    arcs, flats, sciences, calibjobs, curtype, lasttype, \
    curtile, lasttile, internal_id = parse_previous_tables(etable, ptable, night)
    if len(ptable) > 0:
        ptable = update_from_queue(ptable, dry_run=0)
        if dry_run_level < 3:
            write_table(ptable, tablename=proc_table_pathname)
        if any_jobs_not_complete(ptable['STATUS']):
            if not ignore_proc_table_failures:
                print("ERROR: Some jobs have an incomplete job status. This script "
                      + "will not fix them. You should remedy those first. "
                      + "To proceed anyway use '--ignore-proc-table-failures'. Exiting.")
                return
            else:
                print("Warning: Some jobs have an incomplete job status, but "
                      + "you entered '--ignore-proc-table-failures'. This "
                      + "script will not fix them. "
                      + "You should have fixed those first. Proceeding...")
        ptable_expids = np.unique(np.concatenate(ptable['EXPID']))
        if len(set(etable['EXPID']).difference(set(ptable_expids))) == 0:
            print("All EXPID's already present in processing table, nothing to run. Exiting")
            return
    else:
        ptable_expids = np.array([], dtype=int)

    tableng = len(ptable)

    ## Now figure out everything that isn't in the final list, which we'll
    ## write out to the unprocessed table
    toprocess = np.isin(full_etable['EXPID'], etable['EXPID'])
    processed = np.isin(full_etable['EXPID'], ptable_expids)
    unproc_table = full_etable[~(toprocess|processed)]

    ## Done determining what not to process, so write out unproc file
    if dry_run_level < 3:
        write_table(unproc_table, tablename=unproc_table_pathname)

    ## If just starting out and no dark, do the nightlybias
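    ## (when a dark exposure is present, the nightly bias is instead produced by that
    ##  dark's ccdcalib job in the loop below, so no standalone nightlybias is needed)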
    do_bias = ('bias' in proc_obstypes or 'dark' in proc_obstypes) and num_zeros>0
    if tableng == 0 and np.sum(isdark) == 0 and do_bias:
        print("\nNo dark found. Submitting nightlybias before processing exposures.\n")
        prow = default_prow()
        prow['INTID'] = internal_id
        prow['OBSTYPE'] = 'zero'
        internal_id += 1
        prow['JOBDESC'] = 'nightlybias'
        prow['NIGHT'] = night
        prow['CALIBRATOR'] = 1
        cams = set(decode_camword('a0123456789'))
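        ## Start from the full set of cameras and intersect with the good cameras of each
        ## calib zero, so nightlybias only uses cameras present in every zero exposure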
        for row in unproc_table:
            if row['OBSTYPE'] == 'zero' and 'calib' in row['PROGRAM']:
                proccamword = difference_camwords(row['CAMWORD'], row['BADCAMWORD'])
                cams = cams.intersection(set(decode_camword(proccamword)))
        prow['PROCCAMWORD'] = create_camword(list(cams))
        prow = create_and_submit(prow, dry_run=dry_run_level, queue=queue,
                                 reservation=reservation,
                                 strictly_successful=True,
                                 check_for_outputs=check_for_outputs,
                                 resubmit_partial_complete=resubmit_partial_complete,
                                 system_name=system_name)
        calibjobs['nightlybias'] = prow.copy()
        ## Add the processing row to the processing table
        ptable.add_row(prow)
        ## Write out the processing table
        if dry_run_level < 3:
            write_table(ptable, tablename=proc_table_pathname)
            sleep_and_report(2, message_suffix=f"after nightlybias",
                             dry_run=dry_run)

    ## Loop over new exposures and process them as relevant to that type
    for ii, erow in enumerate(etable):
        if erow['EXPID'] in ptable_expids:
            continue
        erow = table_row_to_dict(erow)
        exp = int(erow['EXPID'])
        print(f'\n\n##################### {exp} #########################')

        print(f"\nFound: {erow}")

        curtype, curtile = get_type_and_tile(erow)

        # if this is a new tile/obstype, proceed with submitting all of the jobs for the previous tile
        if lasttype is not None and ((curtype != lasttype) or (curtile != lasttile)):
            # don't submit cumulative redshifts for lasttile if it isn't in tiles_cumulative
            if (z_submit_types is not None) and ('cumulative' in z_submit_types) and (lasttile not in tiles_cumulative):
                cur_z_submit_types = z_submit_types.copy()
                cur_z_submit_types.remove('cumulative')
            else:
                cur_z_submit_types = z_submit_types

            # If done with science exposures for a tile and use_tilenight==True, use
            # submit_tilenight_and_redshifts, otherwise use checkfor_and_submit_joint_job
            if use_tilenight and lasttype == 'science' and len(sciences)>0:
                ptable, sciences, internal_id \
                    = submit_tilenight_and_redshifts(ptable, sciences, calibjobs, lasttype, internal_id,
                                                    dry_run=dry_run_level,
                                                    queue=queue,
                                                    reservation=reservation,
                                                    strictly_successful=True,
                                                    check_for_outputs=check_for_outputs,
                                                    resubmit_partial_complete=resubmit_partial_complete,
                                                    z_submit_types=cur_z_submit_types,
                                                    system_name=system_name,use_specter=use_specter,
                                                    laststeps=tilenight_laststeps)
            else:
                ## If running redshifts and there is a future exposure of the same tile
                ## then only run per exposure redshifts until then
                if lasttype == 'science' and z_submit_types is not None and not use_tilenight:
                    tile_exps = etable['EXPID'][((etable['TILEID'] == lasttile) &
                                                 (etable['LASTSTEP'] == 'all'))]
                    unprocd_exps = [exp not in ptable_expids for exp in tile_exps]
                    if np.any(unprocd_exps):
                        print(f"Identified that tile {lasttile} has future exposures"
                            + f" for this night. Not submitting full night "
                            + f"redshift jobs.")
                        if 'perexp' in z_submit_types:
                            print("Still submitting perexp redshifts")
                            cur_z_submit_types = ['perexp']
                        else:
                            cur_z_submit_types = None
                ptable, calibjobs, sciences, internal_id \
                    = checkfor_and_submit_joint_job(ptable, arcs, flats, sciences,
                                                calibjobs,
                                                lasttype, internal_id,
                                                dry_run=dry_run_level,
                                                queue=queue,
                                                reservation=reservation,
                                                strictly_successful=True,
                                                check_for_outputs=check_for_outputs,
                                                resubmit_partial_complete=resubmit_partial_complete,
                                                z_submit_types=cur_z_submit_types,
                                                system_name=system_name)

        prow = erow_to_prow(erow)
        prow['INTID'] = internal_id
        internal_id += 1
        if prow['OBSTYPE'] == 'dark':
            if num_zeros == 0:
                prow['JOBDESC'] = 'badcol'   # process dark for bad columns even if we don't have zeros for nightlybias
            else:
                prow['JOBDESC'] = 'ccdcalib' # ccdcalib = nightlybias(zeros) + badcol(dark)
        else:
            prow['JOBDESC'] = prow['OBSTYPE']
        prow = define_and_assign_dependency(prow, calibjobs)
        if (not use_tilenight) or erow['OBSTYPE'] != 'science':
            print(f"\nProcessing: {prow}\n")
            prow = create_and_submit(prow, dry_run=dry_run_level, queue=queue,
                                 reservation=reservation, strictly_successful=True,
                                 check_for_outputs=check_for_outputs,
                                 resubmit_partial_complete=resubmit_partial_complete,
                                 system_name=system_name,use_specter=use_specter)

            ## If processed a dark, assign that to the dark job
            if curtype == 'dark':
                prow['CALIBRATOR'] = 1
                calibjobs[prow['JOBDESC']] = prow.copy()

            ## Add the processing row to the processing table
            ptable.add_row(prow)

        ptable_expids = np.append(ptable_expids, erow['EXPID'])

        ## Note: Assumption here on number of flats
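        ## Accumulate prows for later joint jobs: ~120s flats until a nightlyflat exists,
        ## arcs until a psfnight exists, and sciences for the per-tile jobs (skysub-only
        ## exposures are kept for sciences only in tilenight mode)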
        if curtype == 'flat' and calibjobs['nightlyflat'] is None \
                and int(erow['SEQTOT']) < 5 \
                and np.abs(float(erow['EXPTIME'])-120.) < 1.:
            flats.append(prow)
        elif curtype == 'arc' and calibjobs['psfnight'] is None:
            arcs.append(prow)
        elif curtype == 'science' and (prow['LASTSTEP'] != 'skysub' or use_tilenight):
            sciences.append(prow)

        lasttile = curtile
        lasttype = curtype

        tableng = len(ptable)
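        ## Checkpoint the processing table after every exposure (ii % 1 == 0 is always True)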
        if tableng > 0 and ii % 1 == 0 and dry_run_level < 3:
            write_table(ptable, tablename=proc_table_pathname)

        sleep_and_report(1, message_suffix=f"to slow down the queue submission rate",
                         dry_run=dry_run)

        ## Flush the outputs
        sys.stdout.flush()
        sys.stderr.flush()

    if tableng > 0:
        ## No more data coming in, so do bottleneck steps if any apply

        # don't submit cumulative redshifts for lasttile if it isn't in tiles_cumulative
        if (z_submit_types is not None) and ('cumulative' in z_submit_types) and (lasttile not in tiles_cumulative):
            cur_z_submit_types = z_submit_types.copy()
            cur_z_submit_types.remove('cumulative')
        else:
            cur_z_submit_types = z_submit_types

        if use_tilenight and len(sciences)>0:
            ptable, sciences, internal_id \
                = submit_tilenight_and_redshifts(ptable, sciences, calibjobs, lasttype, internal_id,
                                                dry_run=dry_run_level,
                                                queue=queue,
                                                reservation=reservation,
                                                strictly_successful=True,
                                                check_for_outputs=check_for_outputs,
                                                resubmit_partial_complete=resubmit_partial_complete,
                                                z_submit_types=cur_z_submit_types,
                                                system_name=system_name,use_specter=use_specter)
        else:
            ptable, calibjobs, sciences, internal_id \
                = checkfor_and_submit_joint_job(ptable, arcs, flats, sciences, calibjobs,
                                            lasttype, internal_id, dry_run=dry_run_level,
                                            queue=queue, reservation=reservation,
                                            strictly_successful=True,
                                            check_for_outputs=check_for_outputs,
                                            resubmit_partial_complete=resubmit_partial_complete,
                                            z_submit_types=cur_z_submit_types,
                                            system_name=system_name)
        ## All jobs now submitted, update information from job queue and save
        ptable = update_from_queue(ptable, dry_run=dry_run_level)
        if dry_run_level < 3:
            write_table(ptable, tablename=proc_table_pathname)

    print(f"Completed submission of exposures for night {night}.", '\n\n\n')


def get_completed_tiles(specstatus_path=None, complete_tiles_thrunight=None):
    """
    Uses a tiles-specstatus.ecsv file and selection criteria to determine
    what tiles have been completed. Takes an optional argument to point
    to a custom specstatus file. Returns an array of TILEID's.

    Args:
        specstatus_path, str, optional. Default is $DESI_SURVEYOPS/ops/tiles-specstatus.ecsv.
            Location of the surveyops specstatus table.
        complete_tiles_thrunight, int, optional. Default is None. Only tiles completed
            on or before the supplied YYYYMMDD are considered
            completed and will be processed. All complete
            tiles are submitted if None.

    Returns:
        array-like. The tiles from the specstatus file determined by the
        selection criteria to be completed.
    """
    log = get_logger()
    if specstatus_path is None:
        if 'DESI_SURVEYOPS' not in os.environ:
            raise ValueError("DESI_SURVEYOPS is not defined in your environment. " +
                             "You must set it or specify --specstatus-path explicitly.")
        specstatus_path = os.path.join(os.environ['DESI_SURVEYOPS'], 'ops',
                                       'tiles-specstatus.ecsv')
        log.info(f"specstatus_path not defined, setting default to {specstatus_path}.")
    if not os.path.exists(specstatus_path):
        raise IOError(f"Couldn't find {specstatus_path}.")
    specstatus = Table.read(specstatus_path)

    ## good tile selection
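    ## A tile counts as complete if ZDONE is 'true' (main-survey QA), or if it has at least
    ## enoughfraction of its GOALTIME in effective time and is either a non-main tile or a
    ## main 'backup' tile, since those do not get ZDONE set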
    iszdone = (specstatus['ZDONE'] == 'true')
    isnotmain = (specstatus['SURVEY'] != 'main')
    enoughfraction = 0.1  # 10% rather than specstatus['MINTFRAC']
    isenoughtime = (specstatus['EFFTIME_SPEC'] >
                    specstatus['GOALTIME'] * enoughfraction)
    ## only take the approved QA tiles in main
    goodtiles = iszdone
    ## not all special and cmx/SV tiles have zdone set, so also pass those with enough time
    goodtiles |= (isenoughtime & isnotmain)
    ## main backup tiles also don't have zdone set, so also pass those with enough time
    goodtiles |= (isenoughtime & (specstatus['FAPRGRM'] == 'backup'))

    if complete_tiles_thrunight is not None:
        goodtiles &= (specstatus['LASTNIGHT'] <= complete_tiles_thrunight)

    return np.array(specstatus['TILEID'][goodtiles])
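
For reference, a minimal sketch of how submit_night might be called directly from Python. The night, obstypes, and laststeps values below are illustrative only; the call assumes the night's exposure table exists and $DESI_SURVEYOPS is set. With dry_run_level=2, batch scripts are neither written nor submitted, so only the table bookkeeping is exercised.

    from desispec.scripts.submit_night import submit_night

    # Hypothetical example: tilenight processing that also covers skysub-only exposures,
    # in the spirit of PR #2015 (--laststeps=skysub), without touching the batch queue.
    submit_night(20230115,
                 proc_obstypes=['zero', 'dark', 'arc', 'flat', 'science'],
                 z_submit_types='cumulative,perexp',
                 laststeps=['all', 'skysub'],
                 use_tilenight=True,
                 dry_run_level=2)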