desihub / desispec / 19150128419
06 Nov 2025 09:17PM UTC · coverage: 37.704% (+0.7%) from 37.002%
Pull Request #2521: Add redshift QA scripts · merge 2934f9b9b into 6a90a0547
12985 of 34439 relevant lines covered (37.7%) · 0.38 hits per line

Source File: /py/desispec/scripts/proc_night.py (68.9% covered)
"""
desispec.scripts.proc_night
=============================

"""
from desispec.io import findfile
from desispec.scripts.link_calibnight import derive_include_exclude
from desispec.workflow.calibration_selection import \
    determine_calibrations_to_proc
from desispec.workflow.science_selection import determine_science_to_proc, \
    get_tiles_cumulative
from desiutil.log import get_logger
import numpy as np
import os
import sys
import time
import re
from socket import gethostname
from astropy.table import Table, vstack

from desispec.scripts.update_exptable import update_exposure_table
from desispec.workflow.tableio import load_table, load_tables, write_table
from desispec.workflow.utils import sleep_and_report, \
    verify_variable_with_environment, load_override_file
from desispec.workflow.timing import what_night_is_it, during_operating_hours
from desispec.workflow.exptable import get_last_step_options, \
    read_minimal_science_exptab_cols
from desispec.workflow.proctable import default_obstypes_for_proctable, \
    erow_to_prow, default_prow, read_minimal_tilenight_proctab_cols
from desispec.workflow.submission import submit_linkcal_jobs, \
    submit_necessary_biasnights_and_preproc_darks
from desispec.workflow.processing import define_and_assign_dependency, \
    create_and_submit, \
    submit_tilenight_and_redshifts, \
    generate_calibration_dict, \
    night_to_starting_iid, make_joint_prow, \
    set_calibrator_flag, make_exposure_prow, \
    all_calibs_submitted, \
    update_and_recursively_submit, update_accounted_for_with_linking, \
    submit_redshifts, check_darknight_deps_and_update_prow
from desispec.workflow.queue import update_from_queue, any_jobs_need_resubmission, \
    get_resubmission_states
from desispec.io.util import decode_camword, difference_camwords, \
    create_camword, replace_prefix, erow_to_goodcamword, camword_union


def proc_night(night=None, proc_obstypes=None, z_submit_types=None,
               queue=None, reservation=None, system_name=None,
               exp_table_pathname=None, proc_table_pathname=None,
               override_pathname=None, update_exptable=False,
               dry_run_level=0, n_nights_darks=None, dry_run=False, no_redshifts=False,
               ignore_proc_table_failures=False,
               dont_check_job_outputs=False, dont_resubmit_partial_jobs=False,
               tiles=None, surveys=None, science_laststeps=None,
               all_tiles=False, specstatus_path=None, use_specter=False,
               no_cte_flats=False, complete_tiles_thrunight=None,
               all_cumulatives=False, daily=False, specprod=None,
               path_to_data=None, exp_obstypes=None, camword=None,
               badcamword=None, badamps=None, exps_to_ignore=None,
               sub_wait_time=0.1, verbose=False, dont_require_cals=False,
               psf_linking_without_fflat=False, no_resub_failed=False,
               no_resub_any=False, still_acquiring=False):
    """
    Process some or all exposures on a night. Can be used to process an entire
    night, or used to process data currently available on a given night using
    the '--daily' flag.

    Args:
        night (int): The night of data to be processed. Exposure table must exist.
        proc_obstypes (list or np.array, optional): A list of exposure OBSTYPE's
            that should be processed (and therefore added to the processing table).
        z_submit_types (list of str):
            The "group" types of redshifts that should be submitted with each
            exposure. If not specified, default for daily processing is
            ['cumulative', 'pernight-v0']. If false, 'false', or [], then no
            redshifts are submitted.
        queue (str, optional): The name of the queue to submit the jobs to.
            Default is "realtime".
        reservation (str, optional): The reservation to submit jobs to.
            If None, it is not submitted to a reservation.
        system_name (str): batch system name, e.g. cori-haswell, cori-knl,
            perlmutter-gpu
        exp_table_pathname (str): Full path to where the exposure tables are stored,
            including file name.
        proc_table_pathname (str): Full path to where the processing table is to be
            written, including file name.
        override_pathname (str): Full path to the override file.
        update_exptable (bool): If true then the exposure table is updated.
            The default is False.
        dry_run_level (int, optional): If nonzero, this is a simulated run. Default is 0.
            0 runs the code normally.
            1 writes all files but doesn't submit any jobs to Slurm.
            2 writes tables but doesn't write scripts or submit anything.
            3 Doesn't write or submit anything but queries Slurm normally for job status.
            4 Doesn't write, submit jobs, or query Slurm.
            5 Doesn't write, submit jobs, or query Slurm; instead it makes up the status of the jobs.
        n_nights_darks (int, optional): Number of nights of darks to process into a darknight.
            Default is None, which uses the function's default.
        dry_run (bool, optional): Whether to run without submitting scripts or
            not. If dry_run_level is defined, then it overrides this flag. If
            dry_run_level is not set and dry_run=True, dry_run_level is set to 3
            (no scripts generated or run). Default for dry_run is False.
        no_redshifts (bool, optional): Whether to submit redshifts or not.
            If True, redshifts are not submitted.
        ignore_proc_table_failures (bool, optional): True if you want to submit
            other jobs even if the loaded processing table has incomplete jobs in
            it. Use with caution. Default is False.
        dont_check_job_outputs (bool, optional): Default is False. If False,
            the code checks for the existence of the expected final data
            products for the script being submitted. If all files exist and
            this is False, then the script will not be submitted. If some
            files exist and this is False, only the subset of the cameras
            without the final data products will be generated and submitted.
        dont_resubmit_partial_jobs (bool, optional): Default is False. Must be
            used with dont_check_job_outputs=False. If this flag is False, jobs
            with some prior data are pruned using PROCCAMWORD to only process
            the remaining cameras not found to exist.
        tiles (array-like, optional): Only submit jobs for these TILEIDs.
        surveys (array-like, optional): Only submit science jobs for these
            surveys (lowercase).
        science_laststeps (array-like, optional): Only submit jobs for exposures
            with LASTSTEP in these science_laststeps (lowercase).
        all_tiles (bool, optional): Default is False. Set to NOT restrict to
            completed tiles as defined by the table pointed to by specstatus_path.
        specstatus_path (str, optional): Default is
            $DESI_SURVEYOPS/ops/tiles-specstatus.ecsv. Location of the
            surveyops specstatus table.
        use_specter (bool, optional): Default is False. If True, use specter,
            otherwise use gpu_specter by default.
        no_cte_flats (bool, optional): Default is False. If False, cte flats
            are used if available to correct for cte effects.
        complete_tiles_thrunight (int, optional): Default is None. Only tiles
            completed on or before the supplied YYYYMMDD are considered
            completed and will be processed. All complete tiles are submitted
            if None or all_tiles is True.
        all_cumulatives (bool, optional): Default is False. Set to run
            cumulative redshifts for all tiles even if the tile has observations
            on a later night.
        specprod: str. The name of the current production. If used, this will
            overwrite the SPECPROD environment variable.
        daily: bool. Flag that sets other flags for running this script for the
            daily pipeline.
        path_to_data: str. Path to the raw data.
        exp_obstypes: str or comma separated list of strings. The exposure
            OBSTYPE's that you want to include in the exposure table.
        camword: str. Camword that, if set, alters the set of cameras that will
            be set for processing. Examples: a0123456789, a1, a2b3r3,
            a2b3r4z3. Note this is only true for new exposures being
            added to the exposure_table in 'daily' mode.
        badcamword: str. Camword that, if set, will be removed from the camword
            defined in camword if given, or the camword inferred from
            the data if camword is not given. Note this is only true
            for new exposures being added to the exposure_table
            in 'daily' mode.
        badamps: str. Comma separated list of bad amplifiers that should not
            be processed. Should be of the form "{camera}{petal}{amp}",
            i.e. "[brz][0-9][ABCD]". Example: 'b7D,z8A'. Note this is
            only true for new exposures being added to the
            exposure_table in 'daily' mode.
        sub_wait_time: int. Wait time in seconds between submission loops.
            Default 0.1 seconds.
        verbose: bool. True if you want more verbose output, false otherwise.
            Currently not propagated to lower code, so it is only used in the
            main daily_processing script itself.
        dont_require_cals: bool. Default False. If set then the code doesn't
            require either a valid set of calibrations or a valid override file
            to link to calibrations in order to proceed with science processing.
        psf_linking_without_fflat: bool. Default False. If set then the code
            will NOT raise an error if asked to link psfnight calibrations
            without fiberflatnight calibrations.
        no_resub_failed: bool. Set to True if you do NOT want to resubmit
            jobs with Slurm status 'FAILED' by default. Default is False.
        no_resub_any: bool. Set to True if you do NOT want to resubmit
            jobs. Default is False.
        still_acquiring: bool. If True, assume more data might be coming, e.g.
            wait for additional exposures of latest tile.  If False, auto-derive
            True/False based upon night and current time. Primarily for testing.
    """
    ## Get logger
    log = get_logger()
    log.info(f'----- Processing {night} at {time.asctime()} -----')
    log.info(f"SLURM_JOB_ID={os.getenv('SLURM_JOB_ID')} on {gethostname()}")

    ## Inform user of how some parameters will be used
    if camword is not None:
        log.info(f"Note custom {camword=} will only be used for new exposures"
                 f" being entered into the exposure_table, not all exposures"
                 f" to be processed.")
    if badcamword is not None:
        log.info(f"Note custom {badcamword=} will only be used for new exposures"
                 f" being entered into the exposure_table, not all exposures"
                 f" to be processed.")
    if badamps is not None:
        log.info(f"Note custom {badamps=} will only be used for new exposures"
                 f" being entered into the exposure_table, not all exposures"
                 f" to be processed.")

    ## Reconcile the dry_run and dry_run_level
    if dry_run and dry_run_level == 0:
        dry_run_level = 3
    elif dry_run_level > 0:
        dry_run = True
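    ## e.g. dry_run=True with the default dry_run_level=0 becomes dry_run_level=3,
    ## while any explicit dry_run_level > 0 forces dry_run=True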

    ## If running in daily mode, change a bunch of defaults
    if daily:
        ## What night are we running on?
        true_night = what_night_is_it()
        if night is not None:
            night = int(night)
            if true_night != night:
                log.info(f"True night is {true_night}, but running daily for {night=}")
        else:
            night = true_night

        if science_laststeps is None:
            science_laststeps = ['all', 'skysub', 'fluxcal']

        if z_submit_types is None and not no_redshifts:
            z_submit_types = ['cumulative']

        ## still_acquiring is a flag to determine whether to process the last tile in the exposure table
        ## or not. This is used in daily mode when processing and exiting mid-night.
        ## override still_acquiring==False if daily mode during observing hours
        if during_operating_hours(dry_run=dry_run) and (true_night == night):
            if not still_acquiring:
                log.info(f'Daily mode during observing hours on current night, so assuming that more data might arrive and setting still_acquiring=True')
            still_acquiring = True

        update_exptable = True
        append_to_proc_table = True
        all_cumulatives = True
        all_tiles = True
        complete_tiles_thrunight = None
        ## Default for nightly processing is realtime queue
        if queue is None:
            queue = 'realtime'

    ## Default for normal processing is regular queue
    if queue is None:
        queue = 'regular'
    log.info(f"Submitting to the {queue} queue.")

    ## Set night
    if night is None:
        err = "Must specify night unless running in daily=True mode"
        log.error(err)
        raise ValueError(err)
    else:
        log.info(f"Processing {night=}")

    ## Recast booleans from double negative
    check_for_outputs = (not dont_check_job_outputs)
    resubmit_partial_complete = (not dont_resubmit_partial_jobs)
    require_cals = (not dont_require_cals)
    do_cte_flats = (not no_cte_flats)
    ## False if not submitting or simulating
    update_proctable = (dry_run_level == 0 or dry_run_level > 3)

    ## cte flats weren't available before 20211130 so hardcode that in
    if do_cte_flats and night < 20211130:
        log.info("Asked to do cte flat correction but before 20211130 no "
                    + "cte flats are available to do the correction. "
                    + "Code will NOT perform cte flat corrections.")
        do_cte_flats = False

    ###################
    ## Set filenames ##
    ###################
    ## Ensure specprod is set in the environment and that it matches user
    ## specified value if given
    specprod = verify_variable_with_environment(specprod, var_name='specprod',
                                                env_name='SPECPROD')

    ## Determine where the exposure table will be written
    if exp_table_pathname is None:
        exp_table_pathname = findfile('exposure_table', night=night)
    if not os.path.exists(exp_table_pathname) and not update_exptable:
        raise IOError(f"Exposure table: {exp_table_pathname} not found. Exiting this night.")

    ## Determine where the processing table will be written
    if proc_table_pathname is None:
        proc_table_pathname = findfile('processing_table', night=night)
    proc_table_path = os.path.dirname(proc_table_pathname)
    if dry_run_level < 3:
        os.makedirs(proc_table_path, exist_ok=True)

    ## Determine where the unprocessed data table will be written
    unproc_table_pathname = replace_prefix(proc_table_pathname, 'processing', 'unprocessed')

    ## Require cal_override to exist if explicitly specified
    if override_pathname is None:
        override_pathname = findfile('override', night=night, readonly=True)
    elif not os.path.exists(override_pathname):
        raise IOError(f"Specified override file: "
                      f"{override_pathname} not found. Exiting this night.")

    #######################################
    ## Define parameters based on inputs ##
    #######################################
    ## If science_laststeps not defined, default is only LASTSTEP=='all' exposures
    if science_laststeps is None:
        science_laststeps = ['all']
    else:
        laststep_options = get_last_step_options()
        for laststep in science_laststeps:
            if laststep not in laststep_options:
                raise ValueError(f"Couldn't understand laststep={laststep} "
                                 + f"in science_laststeps={science_laststeps}.")
    log.info(f"Processing exposures with the following LASTSTEP's: {science_laststeps}")

    ## Define the group types of redshifts you want to generate for each tile
    if no_redshifts:
        log.info(f"no_redshifts set, so ignoring {z_submit_types=}")
        z_submit_types = None

    if z_submit_types is None:
        log.info("Not submitting scripts for redshift fitting")
    else:
        for ztype in z_submit_types:
            if ztype not in ['cumulative', 'pernight-v0', 'pernight', 'perexp']:
                raise ValueError(f"Couldn't understand ztype={ztype} "
                                 + f"in z_submit_types={z_submit_types}.")
        log.info(f"Redshift fitting with redshift group types: {z_submit_types}")

    ## Identify OBSTYPES to process
    if proc_obstypes is None:
        proc_obstypes = default_obstypes_for_proctable()

    #############################
    ## Start the Actual Script ##
    #############################
    ## If running in daily mode, or requested, then update the exposure table
    ## This reads in and writes out the exposure table to disk
    if update_exptable:
        log.info("Running update_exposure_table.")
        update_exposure_table(night=night, specprod=specprod,
                              exp_table_pathname=exp_table_pathname,
                              path_to_data=path_to_data, exp_obstypes=exp_obstypes,
                              camword=camword, badcamword=badcamword, badamps=badamps,
                              exps_to_ignore=exps_to_ignore,
                              dry_run_level=dry_run_level, verbose=verbose)
        log.info("Done with update_exposure_table.\n\n")

    ## Combine the table names and types for easier passing to io functions
    table_pathnames = [exp_table_pathname, proc_table_pathname]
    table_types = ['exptable', 'proctable']

    ## Load in the files defined above
    etable, init_ptable = load_tables(tablenames=table_pathnames, tabletypes=table_types)
    full_etable = etable.copy()

    ## Set default camword for linkcal and biaspdark jobs that don't rely on specific exposures
    if camword is None:
        if len(etable) > 0:
            camword = camword_union(etable['CAMWORD'])
        else:
            camword = 'a0123456789'

    ## Now that the exposure table is updated, check if we need to run biases and/or preproc darks
    if n_nights_darks is None:
        n_nights_after_darks = None
        n_nights_before_darks = None
    else:
        n_nights_after_darks = int((n_nights_darks-1) // 2)
        n_nights_before_darks = n_nights_darks - 1 - n_nights_after_darks
    proc_biasdark_obstypes = ('zero' in proc_obstypes or 'dark' in proc_obstypes)
    no_bias_job = (len(init_ptable) == 0
               or ('biasnight' not in init_ptable['JOBDESC'] and 'biaspdark' not in init_ptable['JOBDESC']) )
    should_submit = ((not still_acquiring)
                     or (len(etable) > 2 and np.sum(etable['OBSTYPE']=='dark') > 0
                         and np.sum(etable['OBSTYPE']=='arc') > 0 and np.sum(etable['OBSTYPE']=='zero') > 9)
                    )
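    ## i.e. once data acquisition is over we always consider submitting, but while
    ## still acquiring we wait until the night has more than two exposures with at
    ## least one dark, at least one arc, and at least ten zeros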
    returned_ptable = None
    if proc_biasdark_obstypes and no_bias_job and should_submit:
        ## This will populate the processing table with the biases and preproc dark job if
        ## it needed to submit them. It will do it for future and past nights relevant for
        ## the current night's dark nights.
        log.info("Running submit_necessary_biasnights_and_preproc_darks")
        if n_nights_before_darks is None:
            kwargs = {}
        else:
            kwargs = {'n_nights_before': n_nights_before_darks, 'n_nights_after': n_nights_after_darks}
        returned_ptable = submit_necessary_biasnights_and_preproc_darks(reference_night=night, proc_obstypes=proc_obstypes,
                                                                        camword=camword, badcamword=badcamword, badamps=badamps,
                                                                        exp_table_pathname=exp_table_pathname,
                                                                        proc_table_pathname=proc_table_pathname,
                                                                        specprod=specprod, path_to_data=path_to_data,
                                                                        sub_wait_time=sub_wait_time, dry_run_level=dry_run_level,
                                                                        queue=queue, system_name=system_name,
                                                                        psf_linking_without_fflat=psf_linking_without_fflat,
                                                                        **kwargs)
        log.info("Done with submit_necessary_biasnights_and_preproc_darks.\n\n")
    elif proc_biasdark_obstypes and (not still_acquiring) and (not no_bias_job):
        log.info("Running submit_necessary_biasnights_and_preproc_darks to process any additional darks")
        kwargs = {'n_nights_before': 0, 'n_nights_after': 0}
        returned_ptable = submit_necessary_biasnights_and_preproc_darks(reference_night=night, proc_obstypes=proc_obstypes,
                                                                        camword=camword, badcamword=badcamword, badamps=badamps,
                                                                        exp_table_pathname=exp_table_pathname,
                                                                        proc_table_pathname=proc_table_pathname,
                                                                        specprod=specprod, path_to_data=path_to_data,
                                                                        sub_wait_time=sub_wait_time, dry_run_level=dry_run_level,
                                                                        queue=queue, system_name=system_name,
                                                                        psf_linking_without_fflat=psf_linking_without_fflat,
                                                                        **kwargs)

    ## Load in the updated processing table if saved to disk, otherwise use what is in memory
    if dry_run_level > 2:
        if returned_ptable is None:
            ptable = init_ptable
        else:
            ptable = returned_ptable
    else:
        ptable = load_table(tablename=proc_table_pathname, tabletype='proctable')

    ## Quickly exit if we haven't processed the biaspdark job yet and we should have
    jobtypes_requested = ('zero' in proc_obstypes or 'dark' in proc_obstypes)
    job_exists = np.any(np.isin(np.array([b'biasnight', b'biaspdark', b'linkcal']), ptable['JOBDESC'].data))
    # ## ptables not saved for levels 3 and over, so if still acquiring, assume not yet available
    # ## otherwise assume it is available
    #expect_job_exist = ( dry_run_level<3 or (still_acquiring and dry_run_level>=3) )
    if require_cals and jobtypes_requested and not job_exists:
        log.critical("Bias and preproc dark job not found in processing table. "
                    + "We will need to wait for darks to be processed. "
                    + f"Exiting {night=}.")
        ## If still acquiring new data in daily mode, don't exit with error code
        ## But do exit
        log.info(f'Stopping at {time.asctime()}\n')
        if still_acquiring:
            if len(ptable) > 0:
                processed = np.isin(full_etable['EXPID'],
                                    np.unique(np.concatenate(ptable['EXPID'])))
                unproc_table = full_etable[~processed]
            else:
                unproc_table = full_etable

            return ptable, unproc_table
        else:
            sys.exit(1)

    ## For I/O efficiency, pre-populate exposure table and processing table caches
    ## of all nights if doing cross-night redshifts so that future per-night "reads"
    ## will use the cache.
    if z_submit_types is not None and 'cumulative' in z_submit_types:
        ## this shouldn't need to change since we've already updated the exptab
        read_minimal_science_exptab_cols()
        ## this would become out of date for the current night except
        ## write_table will keep it up to date
        read_minimal_tilenight_proctab_cols()

    ## Cut on OBSTYPES
    log.info(f"Processing the following obstypes: {proc_obstypes}")
    good_types = np.isin(np.array(etable['OBSTYPE']).astype(str), proc_obstypes)
    etable = etable[good_types]

    ## Update processing table
    tableng = len(ptable)
    if tableng > 0:
        if update_proctable:
            ptable = update_from_queue(ptable, dry_run_level=dry_run_level)
        if dry_run_level < 3:
            write_table(ptable, tablename=proc_table_pathname, tabletype='proctable')
        if any_jobs_need_resubmission(ptable['STATUS']) and not no_resub_any:
            ## Try up to two times to resubmit failures, afterwards give up
            ## unless explicitly told to proceed with the failures
            ## Note after 2 resubmissions, the code won't resubmit anymore even
            ## if given ignore_proc_table_failures
            log.info("Job failures were detected. Resubmitting those jobs "
                     + "before continuing with new submissions.")

            ptable, nsubmits = update_and_recursively_submit(ptable,
                                                             no_resub_failed=no_resub_failed,
                                                             max_resubs=2,
                                                             ptab_name=proc_table_pathname,
                                                             dry_run_level=dry_run_level,
                                                             reservation=reservation)

            if any_jobs_need_resubmission(ptable['STATUS']):
                if not ignore_proc_table_failures:
                    err = "Some jobs have an incomplete job status. This script " \
                          + "will not fix them. You should remedy those first. "
                    log.error(err)
                    ## if the failures are in calibrations, then crash since
                    ## we need them for any new jobs
                    if any_jobs_need_resubmission(ptable['STATUS'][ptable['CALIBRATOR'] > 0]):
                        err += "To proceed anyway use "
                        err += "'--ignore-proc-table-failures'. Exiting."
                        raise AssertionError(err)
                else:
                    log.warning("Some jobs have an incomplete job status, but "
                          + "you entered '--ignore-proc-table-failures'. This "
                          + "script will not fix them. "
                          + "You should have fixed those first. Proceeding...")
        ## Short cut to exit faster if all science exposures have been processed
        ## but only if we have successfully processed the calibrations
        good_etab = etable[etable['LASTSTEP']=='all']
        terminal_cal_reached = False
        if 'nightlyflat' in ptable['JOBDESC']:
            terminal_cal_reached = True
        elif np.sum(good_etab['OBSTYPE']=='flat') < 12 and not still_acquiring \
                and 'psfnight' in ptable['JOBDESC']:
            terminal_cal_reached = True
        # scisel = ptable['OBSTYPE'] == 'science'
        # if np.sum(scisel) > 0:
        #     ptable_expids = set(np.concatenate(ptable['EXPID'][scisel]))
        # else:
        #     ptable_expids = set()
        etable_expids = set(etable['EXPID'][etable['OBSTYPE'] == 'science'])
        if terminal_cal_reached:
            if len(etable_expids) == 0:
                log.info(f"No science exposures yet. Exiting at {time.asctime()}.")
                return ptable, None
            # elif len(etable_expids.difference(ptable_expids)) == 0:
            #     log.info("All science EXPID's already present in processing table, "
            #              + f"nothing to run. Exiting at {time.asctime()}.")
            #     return ptable, None

        int_id = np.max(ptable['INTID'])+1
    else:
        int_id = night_to_starting_iid(night=night)

    ################### Determine What to Process ###################
    ## Load calibration_override_file
    overrides = load_override_file(filepathname=override_pathname)
    cal_override = {}
    if 'calibration' in overrides:
        cal_override = overrides['calibration']

    ## Determine calibrations that will be linked
    files_to_link = None
    if 'linkcal' in cal_override and (len(ptable) == 0 or 'linkcal' not in ptable['JOBDESC']):
        proccamword = difference_camwords(camword, badcamword)
        ptable, files_to_link = submit_linkcal_jobs(night, ptable, cal_override=cal_override,
                        psf_linking_without_fflat=psf_linking_without_fflat, proccamword=proccamword,
                        dry_run_level=dry_run_level, queue=queue, reservation=reservation,
                        check_outputs=check_for_outputs, system_name=system_name)
        if len(ptable) > 0 and dry_run_level < 3:
            write_table(ptable, tablename=proc_table_pathname, tabletype='proctable')
            sleep_and_report(sub_wait_time,
                             message_suffix=f"to slow down the queue submission rate",
                             dry_run=dry_run, logfunc=log.info)

    ## Identify what calibrations have been done
    calibjobs = generate_calibration_dict(ptable, files_to_link)

    ## Determine the appropriate set of calibrations
    ## Only run if we haven't already linked or done fiberflatnight's
    cal_etable = etable[[]]
    if not all_calibs_submitted(calibjobs['accounted_for'], do_cte_flats):
        cal_etable = determine_calibrations_to_proc(etable,
                                                    do_cte_flats=do_cte_flats,
                                                    still_acquiring=still_acquiring)

    ## Determine the appropriate science exposures
    sci_etable, tiles_to_proc = determine_science_to_proc(
                                        etable=etable, tiles=tiles,
                                        surveys=surveys, laststeps=science_laststeps,
                                        all_tiles=all_tiles,
                                        ignore_last_tile=still_acquiring,
                                        complete_tiles_thrunight=complete_tiles_thrunight,
                                        specstatus_path=specstatus_path)

    ## For cumulative redshifts, identify tiles for which this is the last
    ## night that they were observed
    tiles_cumulative = get_tiles_cumulative(sci_etable, z_submit_types,
                                            all_cumulatives, night)

    ################### Process the data ###################
    ## Process Calibrations
    ## For now assume that a linkcal job links all files and we therefore
    ## don't need to submit anything more.
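    ## Helper used for every submission below: it creates and submits the job for a
    ## single processing row, appends that row to the processing table, writes the
    ## table to disk when dry_run_level < 3, and sleeps for sub_wait_time seconds to
    ## throttle the submission rate.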
    def create_submit_add_and_save(prow, proctable, check_outputs=check_for_outputs,
                                   extra_job_args=None):
        log.info(f"\nProcessing: {prow}\n")
        prow = create_and_submit(prow, dry_run=dry_run_level, queue=queue,
                                 reservation=reservation,
                                 strictly_successful=True,
                                 check_for_outputs=check_outputs,
                                 resubmit_partial_complete=resubmit_partial_complete,
                                 system_name=system_name,
                                 use_specter=use_specter,
                                 extra_job_args=extra_job_args)
        ## Add the processing row to the processing table
        proctable.add_row(prow)
        if len(proctable) > 0 and dry_run_level < 3:
            write_table(proctable, tablename=proc_table_pathname, tabletype='proctable')
        sleep_and_report(sub_wait_time,
                         message_suffix=f"to slow down the queue submission rate",
                         dry_run=dry_run, logfunc=log.info)
        return prow, proctable

    ## Actually process the calibrations
    ## Only run if we haven't already linked or done fiberflatnight's
    if not all_calibs_submitted(calibjobs['accounted_for'], do_cte_flats):
        ptable, calibjobs, int_id = submit_calibrations(cal_etable, ptable,
                                                cal_override, calibjobs,
                                                int_id, night, files_to_link,
                                                create_submit_add_and_save,
                                                n_nights_before_darks=n_nights_before_darks,
                                                n_nights_after_darks=n_nights_after_darks,
                                                proc_table_path=os.path.dirname(proc_table_pathname))

    ## Require some minimal level of calibrations to process science exposures
    if require_cals and not all_calibs_submitted(calibjobs['accounted_for'], do_cte_flats):
        err = (f"Exiting because not all calibration files accounted for "
               + f"with links or submissions and require_cals is True.")
        log.error(err)
        ## If still acquiring new data in daily mode, don't exit with error code
        ## But do exit
        log.info(f'Stopping at {time.asctime()}\n')
        if still_acquiring:
            if len(ptable) > 0:
                processed = np.isin(full_etable['EXPID'],
                                    np.unique(np.concatenate(ptable['EXPID'])))
                unproc_table = full_etable[~processed]
            else:
                unproc_table = full_etable

            return ptable, unproc_table
        else:
            sys.exit(1)

    ## Process Sciences
    ## Loop over new tiles and process them
    unique_ptab_tiles = np.unique(ptable['TILEID'])
    for tile in tiles_to_proc:
        # don't submit cumulative redshifts for lasttile if it isn't in tiles_cumulative
        if z_submit_types is None:
            cur_z_submit_types = []
        else:
            cur_z_submit_types = z_submit_types.copy()
        ## Check if tile has already been processed. If it has, see if all
        ## steps have been submitted
        tnight = None
        if tile in unique_ptab_tiles:
            tile_prows = ptable[ptable['TILEID']==tile]
            ## old proctables have poststdstar, check for that or tilenight
            if 'tilenight' in tile_prows['JOBDESC']:
                tnight = tile_prows[tile_prows['JOBDESC']=='tilenight'][0]
            elif 'poststdstar' in tile_prows['JOBDESC']:
                poststdstars = tile_prows[tile_prows['JOBDESC']=='poststdstar']
                tnight = poststdstars[-1]
                ## Try to gather all the expids, but if that fails just move on
                ## with the EXPID's from the last entry. This only happens in daily
                ## for old proctabs and doesn't matter for cumulative redshifts
                if len(poststdstars) > 1:
                    try:
                        ## If more than one poststdstar, combine all EXPIDs for fake tilenight job
                        tnight['EXPID'] = np.sort(np.concatenate(poststdstars['EXPID']))
                    except:
                        ## Log a warning but don't do anything since this only
                        ## impacts documentation and not data reduction
                        log.warning(f"Tried and failed to populate full EXPIDs for: {dict(tnight)}")

            ## if spectra processed, check for redshifts and remove any found
            if tnight is not None:
                for cur_ztype in cur_z_submit_types.copy():
                    if cur_ztype in tile_prows['JOBDESC']:
                        cur_z_submit_types.remove(cur_ztype)
                ## If the spectra have been processed and all requested redshifts
                ## are done, move on to the next tile
                if len(cur_z_submit_types) == 0:
                    continue

        log.info(f'\n\n################# Submitting {tile} #####################')

        ## Identify the science exposures for the given tile
        tile_etable = sci_etable[sci_etable['TILEID'] == tile]

        ## Should change submit_tilenight_and_redshifts to take erows
        ## but for now will remain backward compatible and use prows
        ## Create list of prows from selected etable rows
        sciences = []
        for erow in tile_etable:
            prow = erow_to_prow(erow)
            prow['INTID'] = int_id
            int_id += 1
            prow['JOBDESC'] = prow['OBSTYPE']
            prow = define_and_assign_dependency(prow, calibjobs)
            sciences.append(prow)

        if 'cumulative' in cur_z_submit_types and tile not in tiles_cumulative:
            cur_z_submit_types.remove('cumulative')

        if len(cur_z_submit_types) == 0:
            cur_z_submit_types = None

        ## if not tilenight, do tilenight and redshifts, otherwise just do redshifts
        if tnight is None:
            ## Process tilenight and redshifts
            ## No longer need to return sciences since this is always the
            ## full set of exposures, but will keep for now for backward
            ## compatibility
            extra_job_args = {}
            if 'science' in overrides and 'tilenight' in overrides['science']:
                extra_job_args = overrides['science']['tilenight']
            else:
                extra_job_args = {}

            extra_job_args['z_submit_types'] = cur_z_submit_types
            extra_job_args['laststeps'] = science_laststeps

            ptable, sciences, int_id = submit_tilenight_and_redshifts(
                                        ptable, sciences, calibjobs, int_id,
                                        dry_run=dry_run_level, queue=queue,
                                        reservation=reservation,
                                        strictly_successful=True,
                                        check_for_outputs=check_for_outputs,
                                        resubmit_partial_complete=resubmit_partial_complete,
                                        system_name=system_name,
                                        use_specter=use_specter,
                                        extra_job_args=extra_job_args)
        elif cur_z_submit_types is not None:
            ## Just process redshifts
            ptable, int_id = submit_redshifts(ptable, sciences, tnight, int_id,
                                              queue=queue, reservation=reservation,
                                              dry_run=dry_run_level,
                                              strictly_successful=True,
                                              check_for_outputs=check_for_outputs,
                                              resubmit_partial_complete=resubmit_partial_complete,
                                              z_submit_types=cur_z_submit_types,
                                              system_name=system_name)

        if len(ptable) > 0 and dry_run_level < 3:
            write_table(ptable, tablename=proc_table_pathname, tabletype='proctable')

        sleep_and_report(sub_wait_time,
                         message_suffix=f"to slow down the queue submission rate",
                         dry_run=dry_run, logfunc=log.info)

        ## Flush the outputs
        sys.stdout.flush()
        sys.stderr.flush()

    ################### Wrap things up ###################
    unproc_table = None
    if len(ptable) > 0:
        ## All jobs now submitted, update information from job queue
        ## If dry_run_level > 3, then Slurm isn't queried
        if update_proctable:
            ptable = update_from_queue(ptable, dry_run_level=dry_run_level)
        if dry_run_level < 3:
            write_table(ptable, tablename=proc_table_pathname, tabletype='proctable')
            ## Now that processing is complete, let's identify what we didn't process
            if len(ptable) > 0:
                processed = np.isin(full_etable['EXPID'], np.unique(np.concatenate(ptable['EXPID'])))
                unproc_table = full_etable[~processed]
            else:
                unproc_table = full_etable
            write_table(unproc_table, tablename=unproc_table_pathname)
    elif dry_run_level < 3 and len(full_etable) > 0:
        ## Done determining what not to process, so write out unproc file
        unproc_table = full_etable
        write_table(unproc_table, tablename=unproc_table_pathname)

    if dry_run_level >= 3:
        log.info(f"{dry_run_level=} so not saving outputs.")
        log.info(f"\n{full_etable=}")
        log.info(f"\n{ptable=}")
        log.info(f"\n{unproc_table=}")

    if still_acquiring:
        log.info(f"Current submission of exposures "
                 + f"for {night=} is complete except for the last tile at {time.asctime()}.\n\n\n\n")
    else:
        log.info(f"All done: Completed submission of exposures for night {night} at {time.asctime()}.\n")

    return ptable, unproc_table


def submit_calibrations(cal_etable, ptable, cal_override, calibjobs, int_id,
                        curnight, files_to_link, create_submit_add_and_save,
                        n_nights_before_darks=None, n_nights_after_darks=None,
                        proc_table_path=None):
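    """
    Submit the calibration jobs for a single night: a ccdcalib job covering the
    darknight, badcolumn, and cte-correction steps as needed, then arcs and the
    joint psfnight job, flats and the joint nightlyflat job, and finally any
    remaining cte flats. Each job is submitted through the
    create_submit_add_and_save callback and recorded in ptable and calibjobs.

    Returns the updated ptable, calibjobs, and the next internal id (int_id).
    """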
    log = get_logger()

    if len(cal_etable) == 0:
        return ptable, calibjobs, int_id

    if len(ptable) > 0:
        ## we use this to check for individual jobs rather than combination
        ## jobs, so only check for scalar jobs where JOBDESC == OBSTYPE
        ## ex. dark, zero, arc, and flat
        explists = ptable['EXPID'][ptable['JOBDESC']==ptable['OBSTYPE']]
        if len(explists) == 0:
            processed_cal_expids = np.array([]).astype(int)
        elif len(explists) == 1:
            processed_cal_expids = np.unique(explists[0]).astype(int)
        else:
            processed_cal_expids = np.unique(np.concatenate(explists).astype(int))
    else:
        processed_cal_expids = np.array([]).astype(int)

    ## Otherwise proceed with submitting the calibrations
    ## Define objects to process
    darks, flats, ctes, cte1s = list(), list(), list(), list()
    zeros = cal_etable[cal_etable['OBSTYPE']=='zero']
    arcs = cal_etable[cal_etable['OBSTYPE']=='arc']
    if 'dark' in cal_etable['OBSTYPE']:
        darks = cal_etable[cal_etable['OBSTYPE']=='dark']
    if 'flat' in cal_etable['OBSTYPE']:
        allflats = cal_etable[cal_etable['OBSTYPE']=='flat']
        is_cte = np.array(['cte' in prog.lower() for prog in allflats['PROGRAM']])
        flats = allflats[~is_cte]
        ctes = allflats[is_cte]

    do_darknight = not calibjobs['accounted_for']['darknight']
    do_badcol = len(darks) > 0 and not calibjobs['accounted_for']['badcolumns']
    have_flats_for_cte = len(ctes) > 0 and len(flats) > 0
    do_cte = have_flats_for_cte and not calibjobs['accounted_for']['ctecorrnight']

    ## if do badcol or cte, then submit a ccdcalib job, otherwise submit a
    ## nightlybias job
    if do_darknight or do_badcol or do_cte:
        ######## Submit ccdcalib ########
        ## process dark for bad columns even if we don't have zeros for nightlybias
        ## ccdcalib = darknight(darks) + badcol(dark) + cte correction
        jobdesc = 'ccdcalib'

        if calibjobs[jobdesc] is None:
            ## Define which erow to use to create the processing table row
            all_expids = []
            if do_badcol or not do_cte:
                ## use first exposure if do_badcol or if (do_darknight and not do_cte)
                job_erow = darks[0]
                dark_expid = job_erow['EXPID']
                all_expids.append(dark_expid)
            else:
                job_erow = ctes[-1]
            ## if doing cte correction, create expid list of last 120s flat
            ## and all ctes provided by the calibration selection function
            if do_cte:
                cte_expids = np.array([flats[-1]['EXPID'], *ctes['EXPID']])
                all_expids.extend(cte_expids)
            else:
                cte_expids = None

            prow, int_id = make_exposure_prow(job_erow, int_id,
                                              calibjobs, jobdesc=jobdesc)

            if len(all_expids) > 1:
                prow['EXPID'] = np.array(all_expids)

            prow['CALIBRATOR'] = 1

            if do_darknight:
                prow = check_darknight_deps_and_update_prow(prow, n_nights_before=n_nights_before_darks,
                                                            n_nights_after=n_nights_after_darks,
                                                            proc_table_path=proc_table_path)
                #if not enough_darks:
                #    log.critical("Not enough darks for every camera. Stopping submission of calibrations "
                #                 + "until this criteria is met.")
                #    return ptable, calibjobs, int_id


            extra_job_args = {'steps': []}
            if do_darknight:
                extra_job_args['steps'].append('darknight')
                extra_job_args['n_nights_before'] = n_nights_before_darks
                extra_job_args['n_nights_after'] = n_nights_after_darks
            if do_badcol:
                extra_job_args['steps'].append('badcolumn')
                extra_job_args['dark_expid'] = dark_expid
            if do_cte:
                extra_job_args['steps'].append('ctecorr')
                extra_job_args['cte_expids'] = cte_expids
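            ## e.g. with all three steps enabled extra_job_args might look like
            ## (illustrative values):
            ## {'steps': ['darknight', 'badcolumn', 'ctecorr'],
            ##  'n_nights_before': 2, 'n_nights_after': 2,
            ##  'dark_expid': 123456, 'cte_expids': array([...])}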
            prow, ptable = create_submit_add_and_save(prow, ptable,
                                                      extra_job_args=extra_job_args)
            calibjobs[prow['JOBDESC']] = prow.copy()
            log.info(f"Submitted ccdcalib job with {do_darknight=}, "
                     + f"{do_badcol=}, {do_cte=}")

    if do_darknight:
        calibjobs['accounted_for']['darknight'] = True
    if do_badcol:
        calibjobs['accounted_for']['badcolumns'] = True
    if do_cte:
        calibjobs['accounted_for']['ctecorrnight'] = True

    ######## Submit arcs and psfnight ########
    if len(arcs)>0 and not calibjobs['accounted_for']['psfnight']:
        arc_prows = []
        for arc_erow in arcs:
            if arc_erow['EXPID'] in processed_cal_expids:
                matches = np.where([arc_erow['EXPID'] in itterprow['EXPID']
                                    for itterprow in ptable])[0]
                if len(matches) == 1:
                    prow = ptable[matches[0]]
                    log.info("Found existing arc prow in ptable, "
                             + f"including it for psfnight job: {list(prow)}")
                    arc_prows.append(prow)
                continue
            prow, int_id = make_exposure_prow(arc_erow, int_id, calibjobs)
            prow, ptable = create_submit_add_and_save(prow, ptable)
            arc_prows.append(prow)

        joint_prow, int_id = make_joint_prow(arc_prows, descriptor='psfnight',
                                             internal_id=int_id)
        ptable = set_calibrator_flag(arc_prows, ptable)
        joint_prow, ptable = create_submit_add_and_save(joint_prow, ptable)
        calibjobs[joint_prow['JOBDESC']] = joint_prow.copy()
        calibjobs['accounted_for']['psfnight'] = True

    ######## Submit flats and nightlyflat ########
    ## If nightlyflat defined we don't need to process more normal flats
    if len(flats) > 0 and not calibjobs['accounted_for']['fiberflatnight']:
        flat_prows = []
        for flat_erow in flats:
            if flat_erow['EXPID'] in processed_cal_expids:
                matches = np.where([flat_erow['EXPID'] in itterprow['EXPID']
                                    for itterprow in ptable])[0]
                if len(matches) == 1:
                    prow = ptable[matches[0]]
                    log.info("Found existing flat prow in ptable, "
                             + f"including it for nightlyflat job: {list(prow)}")
                    flat_prows.append(prow)
                continue

            jobdesc = 'flat'
            prow, int_id = make_exposure_prow(flat_erow, int_id, calibjobs,
                                              jobdesc=jobdesc)
            prow, ptable = create_submit_add_and_save(prow, ptable)
            flat_prows.append(prow)

        joint_prow, int_id = make_joint_prow(flat_prows, descriptor='nightlyflat',
                                             internal_id=int_id)
        ptable = set_calibrator_flag(flat_prows, ptable)
        if 'nightlyflat' in cal_override:
            extra_args = cal_override['nightlyflat']
        else:
            extra_args = None
        joint_prow, ptable = create_submit_add_and_save(joint_prow, ptable,
                                                        extra_job_args=extra_args)
        calibjobs[joint_prow['JOBDESC']] = joint_prow.copy()
        calibjobs['accounted_for']['fiberflatnight'] = True

    ######## Submit cte flats ########
    jobdesc = 'flat'
    for cte_erow in ctes:
        if cte_erow['EXPID'] in processed_cal_expids:
            continue
        prow, int_id = make_exposure_prow(cte_erow, int_id, calibjobs,
                                          jobdesc=jobdesc)
        prow, ptable = create_submit_add_and_save(prow, ptable)

    return ptable, calibjobs, int_id