desihub / desispec — build 4876932652 (github-actions via GitHub, pending completion)

Pull Request #2036: Add option to process cumulative redshifts for all tiles in desi_run_night
Merge cf8a7fb50 into 5acfced85

10 of 10 new or added lines in 1 file covered (100.0%)
10653 of 43752 relevant lines covered (24.35%)
0.24 hits per line

Source File

/py/desispec/scripts/submit_night.py — 11.43% of lines covered
"""
desispec.scripts.submit_night
=============================

"""
from desiutil.log import get_logger
import numpy as np
import os
import sys
import time
import re
from astropy.table import Table, vstack
## Import some helper functions; you can see their definitions by uncommenting the bash shell command
from desispec.workflow.tableio import load_tables, write_table
from desispec.workflow.utils import pathjoin, sleep_and_report
from desispec.workflow.timing import what_night_is_it
from desispec.workflow.exptable import get_exposure_table_path, \
    get_exposure_table_name, get_last_step_options
from desispec.workflow.proctable import default_obstypes_for_proctable, get_processing_table_path, \
                                        get_processing_table_name, erow_to_prow, table_row_to_dict, \
                                        default_prow
from desispec.workflow.procfuncs import parse_previous_tables, get_type_and_tile, \
                                        define_and_assign_dependency, create_and_submit, \
                                        checkfor_and_submit_joint_job, submit_tilenight_and_redshifts
from desispec.workflow.queue import update_from_queue, any_jobs_not_complete
from desispec.workflow.desi_proc_funcs import get_desi_proc_batch_file_path
from desispec.workflow.redshifts import read_minimal_exptables_columns
from desispec.io.util import decode_camword, difference_camwords, create_camword

def submit_night(night, proc_obstypes=None, z_submit_types=None, queue='realtime',
                 reservation=None, system_name=None,
                 exp_table_path=None, proc_table_path=None, tab_filetype='csv',
                 dry_run_level=0, dry_run=False, no_redshifts=False, error_if_not_available=True,
                 append_to_proc_table=False, ignore_proc_table_failures=False,
                 dont_check_job_outputs=False, dont_resubmit_partial_jobs=False,
                 tiles=None, surveys=None, laststeps=None, use_tilenight=False,
                 all_tiles=False, specstatus_path=None, use_specter=False,
                 do_cte_flat=False, complete_tiles_thrunight=None,
                 all_cumulatives=False):
    """
    Creates a processing table and an unprocessed table from a fully populated exposure table and submits those
    jobs for processing (unless dry_run is set).

    Args:
        night (int): The night of data to be processed. Exposure table must exist.
        proc_obstypes (list or np.array, optional): A list of exposure OBSTYPE's that should be processed (and therefore
            added to the processing table).
        z_submit_types (list of str or comma-separated list of str, optional): The "group" types of redshifts that should be
            submitted with each exposure. If not specified, the default for daily processing is
            ['cumulative', 'pernight-v0']. If false, 'false', or [], then no redshifts are submitted.
        queue (str, optional): The name of the queue to submit the jobs to. Default is "realtime".
        reservation (str, optional): The reservation to submit jobs to. If None, it is not submitted to a reservation.
        system_name (str, optional): Batch system name, e.g. cori-haswell, cori-knl, perlmutter-gpu.
        exp_table_path (str, optional): Full path to where the exposure tables are stored, WITHOUT the monthly directory included.
        proc_table_path (str, optional): Full path to where the processing tables are to be written.
        tab_filetype (str, optional): The file extension (without the '.') of the exposure and processing tables.
        dry_run_level (int, optional): If nonzero, this is a simulated run. If dry_run_level=1 the scripts will be written
            but not submitted. If dry_run_level=2, the scripts will be neither written nor submitted. Logging will remain
            the same for testing as though scripts are being submitted. Default is 0 (false).
        dry_run (bool, optional): Whether to run without submitting scripts. If dry_run_level is nonzero, it overrides
            this flag. If dry_run_level is not set and dry_run=True, dry_run_level is set to 2 (no scripts
            generated or run). Default is False.
        no_redshifts (bool, optional): Whether to submit redshifts or not. If True, redshifts are not submitted.
        error_if_not_available (bool, optional): Default is True. Raise an error if the required exposure table doesn't exist,
            otherwise print an error and return.
        append_to_proc_table (bool, optional): If True, submit jobs even if a processing table already exists, appending
            the new jobs to that table. Otherwise exit if a processing table already exists. Default is False.
        ignore_proc_table_failures (bool, optional): True if you want to submit other jobs even if the loaded
            processing table has incomplete jobs in it. Use with caution. Default is False.
        dont_check_job_outputs (bool, optional): Default is False. If False, the code checks for the existence of the expected final
            data products for the script being submitted. If all files exist and this is False,
            then the script will not be submitted. If some files exist and this is False, only the
            subset of the cameras without the final data products will be generated and submitted.
        dont_resubmit_partial_jobs (bool, optional): Default is False. Must be used with dont_check_job_outputs=False. If this flag is
            False, jobs with some prior data are pruned using PROCCAMWORD to only process the
            remaining cameras not found to exist.
        tiles (array-like, optional): Only submit jobs for these TILEIDs.
        surveys (array-like, optional): Only submit science jobs for these surveys (lowercase).
        laststeps (array-like, optional): Only submit jobs for exposures with LASTSTEP in these laststeps (lowercase).
        use_tilenight (bool, optional): Default is False. If True, use desi_proc_tilenight for prestdstar, stdstar,
            and poststdstar steps for science exposures.
        all_tiles (bool, optional): Default is False. Set to NOT restrict to completed tiles as defined by
            the table pointed to by specstatus_path.
        specstatus_path (str, optional): Default is $DESI_SURVEYOPS/ops/tiles-specstatus.ecsv.
            Location of the surveyops specstatus table.
        use_specter (bool, optional): Default is False. If True, use specter; otherwise use gpu_specter.
        do_cte_flat (bool, optional): Default is False. If True, one-second flat exposures are processed for CTE identification.
        complete_tiles_thrunight (int, optional): Default is None. Only tiles completed
            on or before the supplied YYYYMMDD are considered
            completed and will be processed. All complete
            tiles are submitted if None or all_tiles is True.
        all_cumulatives (bool, optional): Default is False. Set to run cumulative redshifts for all tiles
            even if the tile has observations on a later night.
    """
    log = get_logger()

    ## Recast booleans from double negative
    check_for_outputs = (not dont_check_job_outputs)
    resubmit_partial_complete = (not dont_resubmit_partial_jobs)

    if proc_obstypes is None:
        proc_obstypes = default_obstypes_for_proctable()
    print(f"Processing the following obstypes: {proc_obstypes}")

    ## Determine where the exposure table will be written
    if exp_table_path is None:
        exp_table_path = get_exposure_table_path(night=night, usespecprod=True)
    name = get_exposure_table_name(night=night, extension=tab_filetype)
    exp_table_pathname = pathjoin(exp_table_path, name)
    if not os.path.exists(exp_table_pathname):
        if error_if_not_available:
            raise IOError(f"Exposure table: {exp_table_pathname} not found. Exiting this night.")
        else:
            print(f"ERROR: Exposure table: {exp_table_pathname} not found. Exiting this night.")
            return

    ## Determine where the processing table will be written
    if proc_table_path is None:
        proc_table_path = get_processing_table_path()
    os.makedirs(proc_table_path, exist_ok=True)
    name = get_processing_table_name(prodmod=night, extension=tab_filetype)
    proc_table_pathname = pathjoin(proc_table_path, name)

    ## Define the group types of redshifts you want to generate for each tile
    if no_redshifts:
        z_submit_types = None
    else:
        if z_submit_types is None:
            pass
        elif isinstance(z_submit_types, str):
            if z_submit_types.lower() == 'false':
                z_submit_types = None
            elif z_submit_types.lower() == 'none':
                z_submit_types = None
            else:
                z_submit_types = [ztype.strip().lower() for ztype in z_submit_types.split(',')]
                for ztype in z_submit_types:
                    if ztype not in ['cumulative', 'pernight-v0', 'pernight', 'perexp']:
                        raise ValueError(f"Couldn't understand ztype={ztype} in z_submit_types={z_submit_types}.")
        else:
            raise ValueError(f"Couldn't understand z_submit_types={z_submit_types}, type={type(z_submit_types)}.")

    if z_submit_types is None:
        print("Not submitting scripts for redshift fitting")
    else:
        print(f"Redshift fitting with redshift group types: {z_submit_types}")

    ## Reconcile the dry_run and dry_run_level
    if dry_run and dry_run_level == 0:
        dry_run_level = 2
    elif dry_run_level > 0:
        dry_run = True

    ## If laststeps not defined, default is only LASTSTEP=='all' exposures for non-tilenight runs
    tilenight_laststeps = laststeps
    if laststeps is None:
        laststeps = ['all',]
    else:
        laststep_options = get_last_step_options()
        for laststep in laststeps:
            if laststep not in laststep_options:
                raise ValueError(f"Couldn't understand laststep={laststep} in laststeps={laststeps}.")
    print(f"Processing exposures with the following LASTSTEP's: {laststeps}")

    ## Check if the night has already been submitted and don't submit again if it has, unless --append-to-proc-table is given
    if os.path.exists(proc_table_pathname):
        if not append_to_proc_table:
            print(f"ERROR: Processing table: {proc_table_pathname} already exists and not "+
                  "given flag --append-to-proc-table. Exiting this night.")
            return

    ## Determine where the unprocessed data table will be written
    unproc_table_pathname = pathjoin(proc_table_path, name.replace('processing', 'unprocessed'))

    ## Combine the table names and types for easier passing to io functions
    table_pathnames = [exp_table_pathname, proc_table_pathname]
    table_types = ['exptable', 'proctable']

    ## Load in the files defined above
    etable, ptable = load_tables(tablenames=table_pathnames, tabletypes=table_types)
    full_etable = etable.copy()

    ## Sort science exposures by TILEID
    sciexps = (etable['OBSTYPE']=='science')
    scisrtd = etable[sciexps].argsort(['TILEID','EXPID'])
    etable[sciexps] = etable[sciexps][scisrtd]

    ## filter by TILEID if requested
    if tiles is not None:
        log.info(f'Filtering by tiles={tiles}')
        if etable is not None:
            keep = np.isin(etable['TILEID'], tiles)
            etable = etable[keep]
        #if ptable is not None:
        #    keep = np.isin(ptable['TILEID'], tiles)
        #    ptable = ptable[keep]

    if surveys is not None:
        log.info(f'Filtering by surveys={surveys}')
        if etable is not None:
            if 'SURVEY' not in etable.dtype.names:
                raise ValueError(f'surveys={surveys} filter requested, but no SURVEY column in {exp_table_pathname}')

            # only apply survey filter to OBSTYPE=science exposures, i.e. auto-keep non-science
            keep = (etable['OBSTYPE'] != 'science')

            # np.isin doesn't work with bytes vs. str from Tables but direct comparison does, so loop
            for survey in surveys:
                keep |= etable['SURVEY'] == survey

            etable = etable[keep]
        #if ptable is not None:
        #    # ptable doesn't have "SURVEY", so filter by the TILEIDs we just kept
        #    keep = np.isin(ptable['TILEID'], etable['TILEID'])
        #    ptable = ptable[keep]

    ## If asked to do so, only process tiles deemed complete by the specstatus file
    if not all_tiles:
        all_completed_tiles = get_completed_tiles(specstatus_path,
                                              complete_tiles_thrunight=complete_tiles_thrunight)

        ## Add -99 to keep calibration exposures
        all_completed_tiles_withcalib = np.append([-99], all_completed_tiles)
        if etable is not None:
            keep = np.isin(etable['TILEID'], all_completed_tiles_withcalib)
            sciselect = np.isin(etable['TILEID'], all_completed_tiles)
            completed_tiles = np.unique(etable['TILEID'][keep])
            sci_tiles = np.unique(etable['TILEID'][sciselect])
            log.info(f"Processing completed science tiles: {', '.join(sci_tiles.astype(str))}")
            log.info(f"Filtering by completed tiles retained {len(sci_tiles)}/{sum(np.unique(etable['TILEID'])>0)} science tiles")
            log.info(f"Filtering by completed tiles retained {sum(sciselect)}/{sum(etable['TILEID']>0)} science exposures")
            etable = etable[keep]

    ## Cut on LASTSTEP
    good_exps = np.isin(np.array(etable['LASTSTEP']).astype(str), laststeps)
    etable = etable[good_exps]

    ## For cumulative redshifts, identify tiles for which this is the last night that they were observed
    tiles_cumulative = list()
    if z_submit_types is not None and 'cumulative' in z_submit_types:
        tiles_this_night = np.unique(np.asarray(etable['TILEID']))
        tiles_this_night = tiles_this_night[tiles_this_night > 0]  # science tiles, not calibs
        if all_cumulatives:
            tiles_cumulative = list(tiles_this_night)
            log.info(f'Submitting cumulative redshifts for all tiles: {tiles_cumulative}')
        else:
            allexp = read_minimal_exptables_columns(tileids=tiles_this_night)
            for tileid in tiles_this_night:
                nights_with_tile = allexp['NIGHT'][allexp['TILEID'] == tileid]
                if len(nights_with_tile) > 0 and night == np.max(nights_with_tile):
                    tiles_cumulative.append(tileid)
            log.info(f'Submitting cumulative redshifts for {len(tiles_cumulative)}/{len(tiles_this_night)} tiles '
                     f'for which {night} is the last night: {tiles_cumulative}')

    ## Count zeros before trimming by OBSTYPE since they are used for
    ## nightly bias even if they aren't processed individually
    num_zeros = np.sum([erow['OBSTYPE'] == 'zero' and
                       (erow['PROGRAM'].startswith('calib zeros') or erow['PROGRAM'].startswith('zeros for dark'))
                       for erow in etable])

    ## Cut on OBSTYPES
    good_types = np.isin(np.array(etable['OBSTYPE']).astype(str), proc_obstypes)
    etable = etable[good_types]

    ## Cut on EXPTIME
    good_exptimes = []
    already_found_cte_flat = False
    for erow in etable:
        if erow['OBSTYPE'] == 'science' and erow['EXPTIME'] < 60:
            good_exptimes.append(False)
        elif erow['OBSTYPE'] == 'arc' and erow['EXPTIME'] > 8.:
            good_exptimes.append(False)
        elif erow['OBSTYPE'] == 'dark' and np.abs(float(erow['EXPTIME']) - 300.) > 1:
            good_exptimes.append(False)
        elif erow['OBSTYPE'] == 'flat' and np.abs(float(erow['EXPTIME']) - 120.) > 1:
            if do_cte_flat and not already_found_cte_flat \
               and np.abs(float(erow['EXPTIME']) - 1.) < 0.5:
                good_exptimes.append(True)
                already_found_cte_flat = True
            else:
                good_exptimes.append(False)
        else:
            good_exptimes.append(True)
    etable = etable[np.array(good_exptimes)]

    ## Simple table organization to ensure cals processed first
    ## To be eventually replaced by more sophisticated cal selection
    ## Get one dark first
    isdarkcal = np.array([(erow['OBSTYPE'] == 'dark' and 'calib' in
                          erow['PROGRAM']) for erow in etable])
    isdark = np.array([(erow['OBSTYPE'] == 'dark') for erow in etable])

    ## If a cal, want to select that but ignore all other darks
    ## elif only a dark sequence, use that
    if np.sum(isdarkcal)>0:
        wheredark = np.where(isdarkcal)[0]
        ## note this is ~isdark because want to get rid of all other darks
        etable = vstack([etable[wheredark[0]], etable[~isdark]])
    elif np.sum(isdark)>0:
        wheredark = np.where(isdark)[0]
        etable = vstack([etable[wheredark[0]], etable[~isdark]])

    ## Then get rest of the cals above scis
    issci = (etable['OBSTYPE'] == 'science')
    etable = vstack([etable[~issci], etable[issci]])

    ## Get relevant data from the tables
    arcs, flats, sciences, calibjobs, curtype, lasttype, \
    curtile, lasttile, internal_id = parse_previous_tables(etable, ptable, night)
    if len(ptable) > 0:
        ptable = update_from_queue(ptable, dry_run=0)
        if dry_run_level < 3:
            write_table(ptable, tablename=proc_table_pathname)
        if any_jobs_not_complete(ptable['STATUS']):
            if not ignore_proc_table_failures:
                print("ERROR: Some jobs have an incomplete job status. This script "
                      + "will not fix them. You should remedy those first. "
                      + "To proceed anyway use '--ignore-proc-table-failures'. Exiting.")
                return
            else:
                print("Warning: Some jobs have an incomplete job status, but "
                      + "you entered '--ignore-proc-table-failures'. This "
                      + "script will not fix them. "
                      + "You should have fixed those first. Proceeding...")
        ptable_expids = np.unique(np.concatenate(ptable['EXPID']))
        if len(set(etable['EXPID']).difference(set(ptable_expids))) == 0:
            print("All EXPID's already present in processing table, nothing to run. Exiting")
            return
    else:
        ptable_expids = np.array([], dtype=int)

    tableng = len(ptable)

    ## Now figure out everything that isn't in the final list, which we'll
    ## write out to the unprocessed table
    toprocess = np.isin(full_etable['EXPID'], etable['EXPID'])
    processed = np.isin(full_etable['EXPID'], ptable_expids)
    unproc_table = full_etable[~(toprocess|processed)]

    ## Done determining what not to process, so write out unproc file
    if dry_run_level < 3:
        write_table(unproc_table, tablename=unproc_table_pathname)

    ## If just starting out and no dark, do the nightlybias
345
    do_bias = ('bias' in proc_obstypes or 'dark' in proc_obstypes) and num_zeros>0
×
346
    if tableng == 0 and np.sum(isdark) == 0 and do_bias:
×
347
        print("\nNo dark found. Submitting nightlybias before processing exposures.\n")
×
348
        prow = default_prow()
×
349
        prow['INTID'] = internal_id
×
350
        prow['OBSTYPE'] = 'zero'
×
351
        internal_id += 1
×
352
        prow['JOBDESC'] = 'nightlybias'
×
353
        prow['NIGHT'] = night
×
354
        prow['CALIBRATOR'] = 1
×
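        ## Start from all cameras and intersect with the good cameras (CAMWORD minus BADCAMWORD)
        ## of each calibration zero, so nightlybias only uses cameras good in every calib zero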
        cams = set(decode_camword('a0123456789'))
        for row in unproc_table:
            if row['OBSTYPE'] == 'zero' and 'calib' in row['PROGRAM']:
                proccamword = difference_camwords(row['CAMWORD'], row['BADCAMWORD'])
                cams = cams.intersection(set(decode_camword(proccamword)))
        prow['PROCCAMWORD'] = create_camword(list(cams))
        prow = create_and_submit(prow, dry_run=dry_run_level, queue=queue,
                                 reservation=reservation,
                                 strictly_successful=True,
                                 check_for_outputs=check_for_outputs,
                                 resubmit_partial_complete=resubmit_partial_complete,
                                 system_name=system_name)
        calibjobs['nightlybias'] = prow.copy()
        ## Add the processing row to the processing table
        ptable.add_row(prow)
        ## Write out the processing table
        if dry_run_level < 3:
            write_table(ptable, tablename=proc_table_pathname)
            sleep_and_report(2, message_suffix=f"after nightlybias",
                             dry_run=dry_run)

    ## Loop over new exposures and process them as relevant to that type
377
    for ii, erow in enumerate(etable):
×
378
        if erow['EXPID'] in ptable_expids:
×
379
            continue
×
380
        erow = table_row_to_dict(erow)
×
381
        exp = int(erow['EXPID'])
×
382
        print(f'\n\n##################### {exp} #########################')
×
383

384
        print(f"\nFound: {erow}")
×
385

386
        curtype, curtile = get_type_and_tile(erow)
×
387

388
        # if this is a new tile/obstype, proceed with submitting all of the jobs for the previous tile
389
        if lasttype is not None and ((curtype != lasttype) or (curtile != lasttile)):
×
390
            # don't submit cumulative redshifts for lasttile if it isn't in tiles_cumulative
391
            if (z_submit_types is not None) and ('cumulative' in z_submit_types) and (lasttile not in tiles_cumulative):
×
392
                cur_z_submit_types = z_submit_types.copy()
×
393
                cur_z_submit_types.remove('cumulative')
×
394
            else:
395
                cur_z_submit_types = z_submit_types
×
396

397
            # If done with science exposures for a tile and use_tilenight==True, use
398
            # submit_tilenight_and_redshifts, otherwise use checkfor_and_submit_joint_job
399
            if use_tilenight and lasttype == 'science' and len(sciences)>0:
×
400
                ptable, sciences, internal_id \
×
401
                    = submit_tilenight_and_redshifts(ptable, sciences, calibjobs, lasttype, internal_id,
402
                                                    dry_run=dry_run_level,
403
                                                    queue=queue,
404
                                                    reservation=reservation,
405
                                                    strictly_successful=True,
406
                                                    check_for_outputs=check_for_outputs,
407
                                                    resubmit_partial_complete=resubmit_partial_complete,
408
                                                    z_submit_types=cur_z_submit_types,
409
                                                    system_name=system_name,use_specter=use_specter,
410
                                                    laststeps=tilenight_laststeps)
411
            else:
412
                ## If running redshifts and there is a future exposure of the same tile
413
                ## then only run per exposure redshifts until then
414
                if lasttype == 'science' and z_submit_types is not None and not use_tilenight:
×
415
                    tile_exps = etable['EXPID'][((etable['TILEID'] == lasttile) &
×
416
                                                 (etable['LASTSTEP'] == 'all'))]
417
                    unprocd_exps = [exp not in ptable_expids for exp in tile_exps]
×
418
                    if np.any(unprocd_exps):
×
419
                        print(f"Identified that tile {lasttile} has future exposures"
×
420
                            + f" for this night. Not submitting full night "
421
                            + f"redshift jobs.")
422
                        if 'perexp' in z_submit_types:
×
423
                            print("Still submitting perexp redshifts")
×
424
                            cur_z_submit_types = ['perexp']
×
425
                        else:
426
                            cur_z_submit_types = None
×
427
                ptable, calibjobs, sciences, internal_id \
×
428
                    = checkfor_and_submit_joint_job(ptable, arcs, flats, sciences,
429
                                                calibjobs,
430
                                                lasttype, internal_id,
431
                                                dry_run=dry_run_level,
432
                                                queue=queue,
433
                                                reservation=reservation,
434
                                                strictly_successful=True,
435
                                                check_for_outputs=check_for_outputs,
436
                                                resubmit_partial_complete=resubmit_partial_complete,
437
                                                z_submit_types=cur_z_submit_types,
438
                                                system_name=system_name)
439

440
        prow = erow_to_prow(erow)
×
441
        prow['INTID'] = internal_id
×
442
        internal_id += 1
×
443
        if prow['OBSTYPE'] == 'dark':
×
444
            if num_zeros == 0:
×
445
                prow['JOBDESC'] = 'badcol'   # process dark for bad columns even if we don't have zeros for nightlybias
×
446
            else:
447
                prow['JOBDESC'] = 'ccdcalib' # ccdcalib = nightlybias(zeros) + badcol(dark)
×
448
        else:
449
            prow['JOBDESC'] = prow['OBSTYPE']
×
450
        prow = define_and_assign_dependency(prow, calibjobs)
×
451
        if (not use_tilenight) or erow['OBSTYPE'] != 'science':
×
452
            print(f"\nProcessing: {prow}\n")
×
453
            prow = create_and_submit(prow, dry_run=dry_run_level, queue=queue,
×
454
                                 reservation=reservation, strictly_successful=True,
455
                                 check_for_outputs=check_for_outputs,
456
                                 resubmit_partial_complete=resubmit_partial_complete,
457
                                 system_name=system_name,use_specter=use_specter)
458

459
            ## If processed a dark, assign that to the dark job
460
            if curtype == 'dark':
×
461
                prow['CALIBRATOR'] = 1
×
462
                calibjobs[prow['JOBDESC']] = prow.copy()
×
463

464
            ## Add the processing row to the processing table
465
            ptable.add_row(prow)
×
466

467
        ptable_expids = np.append(ptable_expids, erow['EXPID'])
×
468

469
        ## Note: Assumption here on number of flats
470
        if curtype == 'flat' and calibjobs['nightlyflat'] is None \
×
471
                and int(erow['SEQTOT']) < 5 \
472
                and np.abs(float(erow['EXPTIME'])-120.) < 1.:
473
            flats.append(prow)
×
474
        elif curtype == 'arc' and calibjobs['psfnight'] is None:
×
475
            arcs.append(prow)
×
476
        elif curtype == 'science' and (prow['LASTSTEP'] != 'skysub' or use_tilenight):
×
477
            sciences.append(prow)
×
478

479
        lasttile = curtile
×
480
        lasttype = curtype
×
481

482
        tableng = len(ptable)
×
483
        if tableng > 0 and ii % 1 == 0 and dry_run_level < 3:
×
484
            write_table(ptable, tablename=proc_table_pathname)
×
485

486
        sleep_and_report(1, message_suffix=f"to slow down the queue submission rate",
×
487
                         dry_run=dry_run)
488

489
        ## Flush the outputs
490
        sys.stdout.flush()
×
491
        sys.stderr.flush()
×
492

493
    if tableng > 0:
×
494
        ## No more data coming in, so do bottleneck steps if any apply
495

496
        # don't submit cumulative redshifts for lasttile if it isn't in tiles_cumulative
497
        if (z_submit_types is not None) and ('cumulative' in z_submit_types) and (lasttile not in tiles_cumulative):
×
498
            cur_z_submit_types = z_submit_types.copy()
×
499
            cur_z_submit_types.remove('cumulative')
×
500
        else:
501
            cur_z_submit_types = z_submit_types
×
502

503
        if use_tilenight and len(sciences)>0:
×
504
            ptable, sciences, internal_id \
×
505
                = submit_tilenight_and_redshifts(ptable, sciences, calibjobs, lasttype, internal_id,
506
                                                dry_run=dry_run_level,
507
                                                queue=queue,
508
                                                reservation=reservation,
509
                                                strictly_successful=True,
510
                                                check_for_outputs=check_for_outputs,
511
                                                resubmit_partial_complete=resubmit_partial_complete,
512
                                                z_submit_types=cur_z_submit_types,
513
                                                system_name=system_name,use_specter=use_specter,
514
                                                laststeps=tilenight_laststeps)
515
        else:
516
            ptable, calibjobs, sciences, internal_id \
×
517
                = checkfor_and_submit_joint_job(ptable, arcs, flats, sciences, calibjobs,
518
                                            lasttype, internal_id, dry_run=dry_run_level,
519
                                            queue=queue, reservation=reservation,
520
                                            strictly_successful=True,
521
                                            check_for_outputs=check_for_outputs,
522
                                            resubmit_partial_complete=resubmit_partial_complete,
523
                                            z_submit_types=cur_z_submit_types,
524
                                            system_name=system_name)
525
        ## All jobs now submitted, update information from job queue and save
526
        ptable = update_from_queue(ptable, dry_run=dry_run_level)
×
527
        if dry_run_level < 3:
×
528
            write_table(ptable, tablename=proc_table_pathname)
×
529

530
    print(f"Completed submission of exposures for night {night}.", '\n\n\n')
×

def get_completed_tiles(specstatus_path=None, complete_tiles_thrunight=None):
    """
    Uses a tiles-specstatus.ecsv file and selection criteria to determine
    which tiles have been completed. Takes an optional argument to point
    to a custom specstatus file. Returns an array of TILEID's.

    Args:
        specstatus_path (str, optional): Default is $DESI_SURVEYOPS/ops/tiles-specstatus.ecsv.
            Location of the surveyops specstatus table.
        complete_tiles_thrunight (int, optional): Default is None. Only tiles completed
            on or before the supplied YYYYMMDD are considered
            completed and will be processed. All complete
            tiles are submitted if None.

    Returns:
        array-like. The tiles from the specstatus file determined by the
        selection criteria to be completed.
    """
    log = get_logger()
    if specstatus_path is None:
        if 'DESI_SURVEYOPS' not in os.environ:
            raise ValueError("DESI_SURVEYOPS is not defined in your environment. " +
                             "You must set it or specify --specstatus-path explicitly.")
        specstatus_path = os.path.join(os.environ['DESI_SURVEYOPS'], 'ops',
                                       'tiles-specstatus.ecsv')
        log.info(f"specstatus_path not defined, setting default to {specstatus_path}.")
    if not os.path.exists(specstatus_path):
        raise IOError(f"Couldn't find {specstatus_path}.")
    specstatus = Table.read(specstatus_path)

    ## good tile selection
    iszdone = (specstatus['ZDONE'] == 'true')
    isnotmain = (specstatus['SURVEY'] != 'main')
    enoughfraction = 0.1  # 10% rather than specstatus['MINTFRAC']
    isenoughtime = (specstatus['EFFTIME_SPEC'] >
                    specstatus['GOALTIME'] * enoughfraction)
    ## only take the approved QA tiles in main
    goodtiles = iszdone
    ## not all special and cmx/SV tiles have zdone set, so also pass those with enough time
    goodtiles |= (isenoughtime & isnotmain)
    ## main backup tiles also don't have zdone set, so also pass those with enough time
    goodtiles |= (isenoughtime & (specstatus['FAPRGRM'] == 'backup'))

    if complete_tiles_thrunight is not None:
        goodtiles &= (specstatus['LASTNIGHT'] <= complete_tiles_thrunight)

    return np.array(specstatus['TILEID'][goodtiles])
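For orientation, here is a minimal usage sketch (not part of the covered source). It assumes a configured DESI software environment with desispec installed and, for get_completed_tiles, $DESI_SURVEYOPS pointing at the surveyops checkout; the night value below is a placeholder, and in production submit_night is normally driven by the desi_run_night wrapper referenced in the pull request title rather than called directly.

from desispec.scripts.submit_night import submit_night, get_completed_tiles

night = 20230115   # placeholder YYYYMMDD; a real run would use the night being processed

# Dry run at level 2: the exposure table is read, the processing/unprocessed tables are
# still written, and the intended submissions are logged, but no batch scripts are
# written or submitted to the queue.
submit_night(night,
             z_submit_types='cumulative',   # comma-separated string of redshift group types
             dry_run_level=2,
             system_name='perlmutter-gpu',
             all_cumulatives=True)          # cumulative redshifts even for tiles observed again later

# Independently inspect which tiles the specstatus file considers complete through this night.
completed = get_completed_tiles(complete_tiles_thrunight=night)
print(f"{len(completed)} tiles considered complete through {night}")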