desihub / desisurvey, build 17651900636
11 Sep 2025 05:08PM UTC. Coverage: 35.141% (+4.8%) from 30.363%
Pull Request #190 (github, weaverba137): Update test matrix for NumPy 2 compatibility
Commit: move requirements into setup.cfg
1892 of 5384 relevant lines covered (35.14%); 0.35 hits per line

Source file: /py/desisurvey/optimize.py (61.03% covered)

1
"""Optimize future DESI observations.
2
"""
3
from __future__ import print_function, division
1✔
4

5
import pkg_resources
1✔
6

7
import numpy as np
1✔
8
import scipy.special
1✔
9

10
import astropy.table
1✔
11
import astropy.coordinates
1✔
12
import astropy.units as u
1✔
13

14
import desiutil.log
1✔
15

16
import desisurvey.config
1✔
17

18

19
def wrap(angle, offset):
1✔
20
    """Wrap values in the range [0, 360] to [offset, offset+360].
21
    """
22
    return np.fmod(angle - offset + 360, 360) + offset
1✔
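# For example: wrap(np.array([10., 350.]), 180.) returns array([370., 350.]),
# i.e. both angles are re-expressed in the half-open range [180, 540).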
23

24

25
class Optimizer(object):
1✔
26
    """Initialize the hour angle assignments for specified tiles.
27

28
    See `DESI-3060
29
    <https://desi.lbl.gov/DocDB/cgi-bin/private/ShowDocument?docid=3060>`__
30
    for details.
31

32
    Parameters
33
    ----------
34
    condition : 'DARK', 'GRAY' or 'BRIGHT'
35
        Which obsconditions to optimize.
36
    lst_edges : array
37
        Array of N+1 LST bin edges.
38
    lst_hist : array
39
        Array of N bins giving the available LST distribution in units of
40
        sidereal hours per bin.
41
    subset : array or None
42
        An array of tile ID values to optimize.
43
        Optimizes all tiles with the relevant conditions if None.
44
    start : date or None
45
        Only consider available LST starting from this date.  Use the
46
        nominal survey start date if None.
47
    stop : date or None
48
        Only consider available LST before this end date.  Use the nominal
49
        survey stop date if None.
50
    init : 'zero', 'flat' or 'array'
51
        Method for initializing tile hour angles: 'zero' sets all hour angles
52
        to zero, 'flat' matches the CDF of available LST to planned LST (without
53
        accounting for exposure time), 'array' initializes from the initial_ha
54
        argument.
55
    initial_ha : array or None
56
        Only used when init is 'array'. The subset arg must also be provided
57
        to specify which tile each HA applies to.
58
    stretch : float
59
        Amount to stretch exposure times to account for factors other than
60
        dust and airmass (e.g., seeing, transparency, moon). This does not
61
        have a big effect on the results, so it can be approximate.
62
    smoothing_radius : :class:`astropy.units.Quantity`
63
        Gaussian sigma for calculating smoothing weights with angular units.
64
    center : float or None
65
        Used by the 'flat' initialization method to specify the starting
66
        DEC for the CDF balancing algorithm. When None, the 'flat' method
67
        scans over a grid of center values and picks the best one, but this
68
        is relatively slow.  Ignored unless init is 'flat'.
69
    seed : int or None
70
        Random number seed to use for stochastic elements of the optimizer.
71
        Do not use None if reproducible results are required.
72
    weights : array
73
        Array of relative weights to use when selecting which LST bin to
74
        optimize next.  Candidate bins are ordered by an estimated improvement.
75
        The length of the weights array determines how many candidates to
76
        consider, in decreasing order, and the weight values determine their
77
        relative weight. The next bin to optimize is then selected at random.
78
    completed : array
79
        Array of tileid, donefrac_{bright/gray/dark} indicating what fraction
80
        of particular tiles has already been observed in a particular
81
        condition.
82
    """
83
    def __init__(self, condition, lst_edges, lst_hist, subset=None, start=None, stop=None,
1✔
84
                 init='flat', initial_ha=None, stretch=1.0, smoothing_radius=10,
85
                 center=None, seed=123, weights=[5, 4, 3, 2, 1],
86
                 completed=None):
87

88
        tiles = desisurvey.tiles.get_tiles()
1✔
89
        if not isinstance(smoothing_radius, u.Quantity):
1✔
90
            smoothing_radius = smoothing_radius * u.deg
1✔
91
        self.log = desiutil.log.get_logger()
1✔
92
        config = desisurvey.config.Configuration()
1✔
93
        self.gen = np.random.RandomState(seed)
1✔
94
        self.cum_weights = np.asarray(weights, float).cumsum()
1✔
95
        self.cum_weights /= self.cum_weights[-1]
1✔
96
        self.stretch = stretch
1✔
97

98
        if start is None:
1✔
99
            start = config.first_day()
1✔
100
        else:
101
            start = desisurvey.utils.get_date(start)
×
102
        if stop is None:
1✔
103
            stop = config.last_day()
1✔
104
        else:
105
            stop = desisurvey.utils.get_date(stop)
×
106
        if start >= stop:
1✔
107
            raise ValueError('Expected start < stop.')
×
108

109
        self.lst_hist = np.asarray(lst_hist)
1✔
110
        self.lst_hist_sum = self.lst_hist.sum()
1✔
111
        self.nbins = len(lst_hist)
1✔
112
        self.lst_edges = np.asarray(lst_edges)
1✔
113
        if self.lst_edges.shape != (self.nbins + 1,):
1✔
114
            raise ValueError('Inconsistent lengths of lst_hist and lst_edges.')
×
115
        self.lst_centers = 0.5 * (self.lst_edges[1:] + self.lst_edges[:-1])
1✔
116
        self.origin = self.lst_edges[0]
1✔
117
        self.binsize = 360. / self.nbins
1✔
118

119
        # Select the tiles to plan.
120
        if subset is not None:
1✔
121
            idx = tiles.index(subset)
1✔
122
            # Check that all tiles in the subset are observable in these
123
            # conditions.
124
            if not np.all(tiles.allowed_in_conditions(condition)[idx]):
1✔
125
                raise ValueError('Subset contains non-{} tiles.'.format(condition))
×
126
            tile_sel = np.zeros(tiles.ntiles, bool)
1✔
127
            tile_sel[idx] = True
1✔
128
        else:
129
            # Use all tiles in the program by default.
130
            tile_sel = tiles.allowed_in_conditions(condition)
×
131

132
        # Get nominal exposure time for this program,
133
        # converted to LST equivalent in degrees.
134
        texp_nom = u.Quantity([
1✔
135
            getattr(config.programs, program).efftime()
136
            for program in tiles.tileprogram[tile_sel]])
137
        moon_up_factor = getattr(config.conditions, condition).moon_up_factor()
1✔
138
        texp_nom *= moon_up_factor
1✔
139
        if completed is not None:
1✔
140
            # completed = astropy.table.Table.read(completed)
141
            # donefrac = np.zeros(tiles.ntiles, dtype='f4')
142
            from astropy.io import fits
×
143
            completed = fits.getdata(completed)
×
144
            idx, mask = tiles.index(completed['TILEID'], return_mask=True)
×
145
            idx = idx[mask]
×
146
            boost = np.ones(tiles.ntiles, dtype='f4')
×
147
            boost[idx] = (completed['NNIGHT_NEEDED'][mask] -
×
148
                          completed['NNIGHT'][mask])
149
            boost = np.clip(boost, 0, np.inf)
×
150
            # donefrac[idx] = completed['donefrac'][mask]
151
            # boost = np.clip(1-donefrac, 0, 1)
152
            texp_nom *= boost[tile_sel]
×
153

154
        self.dlst_nom = 360 * texp_nom.to(u.day).value / 0.99726956583
1✔
155

156
        # Save arrays for the tiles to plan.
157
        self.ra = wrap(tiles.tileRA[tile_sel], self.origin)
1✔
158
        self.dec = tiles.tileDEC[tile_sel]
1✔
159
        self.tid = tiles.tileID[tile_sel]
1✔
160
        self.idx = np.where(tile_sel)[0]
1✔
161
        self.ntiles = len(self.ra)
1✔
162

163
        self.max_abs_ha = tiles.max_abs_ha[tile_sel]
1✔
164

165
        # Lookup static dust exposure factors for each tile.
166
        self.dust_factor = tiles.dust_factor[tile_sel]
1✔
167

168
        # Initialize smoothing weights.
169
        self.init_smoothing(smoothing_radius)
1✔
170

171
        self.log.info(
1✔
172
            '{0}: {1:.1f}h for {2} tiles (texp_nom {3:.1f}).'
173
            .format(condition, self.lst_hist_sum, self.ntiles,
174
                    np.median(texp_nom)))
175

176
        # Initialize metric histories.
177
        self.scale_history = []
1✔
178
        self.loss_history = []
1✔
179
        self.RMSE_history = []
1✔
180

181
        # Initialize improve() counters.
182
        self.nslow = 0
1✔
183
        self.nimprove = 0
1✔
184
        self.nsmooth = 0
1✔
185

186
        # Calculate schedule plan with HA=0 assignments to establish
187
        # the smallest possible total exposure time.
188
        self.plan_tiles = self.get_plan(np.zeros(self.ntiles))
1✔
189
        self.use_plan(save_history=False)
1✔
190
        self.min_total_time = self.plan_hist.sum()
1✔
191

192
        self.plan_tiles = self.get_plan(np.zeros(self.ntiles))
1✔
193
        self.use_plan()
1✔
194
        self.plan_hist_ha0 = self.plan_hist.copy()
1✔
195

196
        # Initialize HA assignments for each tile.
197
        if init == 'zero':
1✔
198
            self.ha = np.zeros(self.ntiles)
1✔
199
        elif init == 'array':
×
200
            if subset is None:
×
201
                raise ValueError('Must specify subset when init is "array".')
×
202
            if len(initial_ha) != self.ntiles:
×
203
                raise ValueError('Array initial_ha has wrong length.')
×
204
            self.ha = np.asarray(initial_ha)
×
205
        elif init == 'flat':
×
206
            if center is None:
×
207
                # Try 72 equally spaced centers.
208
                icenters = np.arange(0, self.nbins, self.nbins // 72)
×
209
            else:
210
                # Find the closest bin edge to the requested value.
211
                icenters = [np.argmin(np.abs(center - lst_edges))]
×
212
            min_score = np.inf
×
213
            scores = []
×
214
            for icenter in icenters:
×
215
                # Rotate the available LST histogram to this new center.
216
                hist = np.roll(self.lst_hist, -icenter)
×
217
                center = self.lst_edges[icenter]
×
218
                edges = np.linspace(center, center + 360, self.nbins + 1)
×
219
                # Calculate the CDF of available LST.
220
                lst_cdf = np.zeros_like(edges)
×
221
                lst_cdf[1:] = np.cumsum(hist)
×
222
                lst_cdf /= lst_cdf[-1]
×
223
                lst_cen = (edges[1:]+edges[:-1])/2
×
224
                lst_cdf = (lst_cdf[1:]+lst_cdf[:-1])/2
×
225
                # Calculate the CDF of planned LST usage relative to the same
226
                # central LST, assuming HA=0. Instead of spreading each exposure
227
                # over multiple LST bins, add its entire HA=0 exposure time at
228
                # LST=RA.
229
                exptime, _ = self.get_exptime(ha=np.zeros(self.ntiles))
×
230
                tile_ra = wrap(tiles.tileRA[tile_sel], center)
×
231
                sort_idx = np.argsort(tile_ra)
×
232
                tile_cdf = np.cumsum(exptime[sort_idx])
×
233
                tile_cdf /= tile_cdf[-1]
×
234
                tile_cdf = (np.concatenate([[0], tile_cdf[:-1]])+tile_cdf)/2
×
235
                # Use linear interpolation to find an LST for each tile that
236
                # matches the plan CDF to the available LST CDF.
237
                new_lst = np.interp(tile_cdf, lst_cdf, lst_cen)
×
238
                # Calculate each tile's HA as the difference between its HA=0
239
                # LST and its new LST after CDF matching.
240
                ha = np.empty(self.ntiles)
×
241
                ha[sort_idx] = np.fmod(new_lst - tile_ra[sort_idx], 360)
×
242
                # Clip tiles to their airmass limits.
243
                ha = np.clip(ha, -self.max_abs_ha, +self.max_abs_ha)
×
244
                # Calculate the score for this HA assignment.
245
                self.plan_tiles = self.get_plan(ha)
×
246
                self.use_plan(save_history=False)
×
247
                scores.append(self.eval_score(self.plan_hist))
×
248
                # Keep track of the best score found so far.
249
                if scores[-1] < min_score:
×
250
                    self.ha = ha.copy()
×
251
                    min_score = scores[-1]
×
252
                    center_best = center
×
253
            self.log.info(
×
254
                'Center flat initial HA assignments at LST {:.0f} deg.'
255
                .format(center_best))
256
        else:
257
            raise ValueError('Invalid init option: {0}.'.format(init))
×
258

259
        # Check initial HA assignments against airmass limits.
260
        ha_clipped = np.clip(self.ha, -self.max_abs_ha, +self.max_abs_ha)
1✔
261
        if not np.all(self.ha == ha_clipped):
1✔
262
            delta = np.abs(self.ha - ha_clipped)
×
263
            idx = np.argmax(delta)
×
264
            self.log.warn('Clipped {0} HA assignments to airmass limits.'
×
265
                          .format(np.count_nonzero(delta)))
266
            self.log.warn('Max clip is {0:.1f} deg for tile {1}.'
×
267
                          .format(delta[idx], self.tid[idx]))
268
            self.ha = ha_clipped
×
269

270
        # Calculate schedule plan with initial HA assignments.
271
        self.plan_tiles = self.get_plan(self.ha)
1✔
272
        self.use_plan()
1✔
273
        self.ha_initial = self.ha.copy()
1✔
274
        self.num_adjustments = np.zeros(self.ntiles, int)
1✔
275

276
    def get_exptime(self, ha, subset=None):
1✔
277
        """Estimate exposure times for the specified tiles.
278

279
        Estimates account for airmass and dust extinction only.
280

281
        Parameters
282
        ----------
283
        ha : array
284
            Array of hour angle assignments in degrees.
285
        subset : array or None
286
            Restrict calculation to a subset of tiles specified by the
287
            indices in this array, or use all tiles passed to the constructor
288
            if None.
289

290
        Returns
291
        -------
292
        tuple
293
            Tuple (exptime, subset) where exptime is an array of estimated
294
            exposure times in degrees and subset is the input subset or
295
            else a slice initialized for all tiles.
296
        """
297
        # Calculate for all tiles if no subset is specified.
298
        if subset is None:
1✔
299
            subset = slice(None)
1✔
300
        # Calculate best-case exposure times (i.e., using only airmass & dust)
301
        # in degrees for the specified HA assignments.
302
        tiles = desisurvey.tiles.get_tiles()
1✔
303
        X = tiles.airmass(ha, self.idx[subset])
1✔
304
        exptime = (self.dlst_nom[subset] *
1✔
305
                   desisurvey.etc.airmass_exposure_factor(X) *
306
                   self.dust_factor[subset]) * self.stretch
307
        return exptime, subset
1✔
308

309
    def get_plan(self, ha, subset=None):
1✔
310
        """Calculate an LST usage plan for specified hour angle assignments.
311

312
        Parameters
313
        ----------
314
        ha : array
315
            Array of hour angle assignments in degrees.
316
        subset : array or None
317
            Restrict calculation to a subset of tiles specified by the
318
            indices in this array, or use all tiles passed to the constructor
319
            if None.
320

321
        Returns
322
        -------
323
        array
324
            Array of shape (ntiles, nbins) giving the exposure time in hours
325
            that each tile needs in each LST bin. When a subset is specified,
326
            ntiles only indexes tiles in the subset.
327
        """
328
        exptime, subset = self.get_exptime(ha, subset)
1✔
329
        # Calculate LST windows for each tile's exposure.
330
        lst_mid = self.ra[subset] + ha
1✔
331
        lst_min = np.fmod(
1✔
332
            lst_mid - 0.5 * exptime - self.origin + 360, 360) + self.origin
333
        ##assert np.all(lst_min >= self.origin)
334
        ##assert np.all(lst_min < self.origin + 360)
335
        lst_max = np.fmod(
1✔
336
            lst_mid + 0.5 * exptime - self.origin + 360, 360) + self.origin
337
        ##assert np.all(lst_max >= self.origin)
338
        ##assert np.all(lst_max < self.origin + 360)
339
        # Calculate each exposure's overlap with each LST bin.
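        # For a window [lst_min, lst_max] and a bin [e, e + binsize],
        #   lo = clip(e + binsize - lst_min, 0, binsize)
        #   hi = clip(lst_max - e, 0, binsize)
        # so a non-wrapping window (lst_max >= lst_min) overlaps the bin by
        # lo + hi - binsize, while a window that wraps past the LST origin
        # contributes its two disjoint pieces via lo and hi separately.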
340
        lo = np.clip(
1✔
341
            self.lst_edges[1:] - lst_min[:, np.newaxis], 0, self.binsize)
342
        hi = np.clip(
1✔
343
            lst_max[:, np.newaxis] - self.lst_edges[:-1], 0, self.binsize)
344
        plan = lo + hi
1✔
345
        plan[lst_max >= lst_min] -= self.binsize
1✔
346
        ##assert np.allclose(plan.sum(axis=1), exptime)
347
        # Convert from degrees to sidereal hours.
348
        return plan * 24. / 360.
1✔
349

350
    def eval_score(self, plan_hist):
1✔
351
        """Evaluate the score that improve() tries to minimize.
352

353
        Score is calculated as 100 * RMSE + 100 * loss.
354

355
        Parameters
356
        ----------
357
        plan_hist : array
358
            Histogram of planned LST usage for all tiles.
359

360
        Returns
361
        -------
362
        float
363
            Score value.
364
        """
365
        return (
1✔
366
            100 * self.eval_RMSE(plan_hist) +
367
            100 * self.eval_loss(plan_hist))
368

369
    def eval_RMSE(self, plan_hist):
1✔
370
        """Evaluate the mean-squared error metric for the specified plan.
371

372
        This is the metric that :meth:`optimize` attempts to improve. It
373
        measures the similarity of the available and planned LST histogram
374
        shapes, but not their normalizations.  A separate :meth:`eval_scale`
375
        metric measures how efficiently the plan uses the available LST.
376

377
        The histogram of available LST is rescaled to the same total time (area)
378
        before calculating residuals relative to the planned LST usage.
379

380
        RMSE values are scaled by (10K / ntiles) so the absolute metric value
381
        is more consistent when ntiles is varied.
382

383
        Parameters
384
        ----------
385
        plan_hist : array
386
            Histogram of planned LST usage for all tiles.
387

388
        Returns
389
        -------
390
        float
391
            Root-mean-square error value.
392
        """
393
        # Rescale the available LST total time to the plan total time.
394
        plan_sum = plan_hist.sum()
1✔
395
        scale = plan_sum / self.lst_hist_sum
1✔
396
        residuals = plan_hist - scale * self.lst_hist
1✔
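        # Note that sqrt(sum(res**2) * nbins) / plan_sum equals the RMS residual
        # per bin divided by the mean planned time per bin, so this metric is
        # dimensionless and insensitive to the overall plan normalization.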
397
        return np.sqrt(residuals.dot(residuals) * self.nbins) / plan_sum
1✔
398

399
    def eval_loss(self, plan_hist):
1✔
400
        """Evaluate relative loss of current plan relative to HA=0 plan.
401

402
        Calculated as (T-T0)/T0 where T is the total exposure time of the
403
        current plan and T0 is the total exposure time of an HA=0 plan.
404

405
        Parameters
406
        ----------
407
        plan_hist : array
408
            Histogram of planned LST usage for all tiles.
409

410
        Returns
411
        -------
412
        float
413
            Loss factor.
414
        """
415
        return plan_hist.sum() / self.min_total_time - 1.0
1✔
416

417
    def eval_scale(self, plan_hist):
1✔
418
        """Evaluate the efficiency of the specified plan.
419

420
        Calculates the minimum scale factor applied to the available
421
        LST histogram so that the planned LST usage is always <= the scaled
422
        available LST histogram.  This value can be interpreted as the fraction
423
        of the available time required to complete all tiles.
424

425
        This metric is only loosely correlated with the RMSE metric, so provides
426
        a useful independent check that the optimization is producing the
427
        desired results.
428

429
        This metric is not well defined if any bin of the available LST
430
        histogram is empty, which indicates that some tiles will not be
431
        observable during the [start:stop] range being optimized.  In this case,
432
        only bins with some available LST are included in the scale calculation.
433

434
        Parameters
435
        ----------
436
        plan_hist : array
437
            Histogram of planned LST usage for all tiles.
438

439
        Returns
440
        -------
441
        float
442
            Scale factor.
443
        """
444
        nonzero = self.lst_hist > 0
1✔
445
        return (plan_hist[nonzero] / self.lst_hist[nonzero]).max()
1✔
446

447
    def use_plan(self, save_history=True):
1✔
448
        """Use the current plan and update internal arrays.
449

450
        Calculates the `plan_hist` arrays from the per-tile `plan_tiles` array,
451
        and records the current values of the RMSE and scale metrics.
452
        """
453
        self.plan_hist = self.plan_tiles.sum(axis=0)
1✔
454
        if not np.all(np.isfinite(self.plan_hist)):
1✔
455
            raise RuntimeError('Found invalid plan_tiles in use_plan().')
×
456
        if save_history:
1✔
457
            self.scale_history.append(self.eval_scale(self.plan_hist))
1✔
458
            self.loss_history.append(self.eval_loss(self.plan_hist))
1✔
459
            self.RMSE_history.append(self.eval_RMSE(self.plan_hist))
1✔
460

461
    def next_bin(self):
1✔
462
        """Select which LST bin to adjust next.
463

464
        The algorithm determines which bin of the planned LST usage histogram
465
        should be decreased in order to maximize the decrease of the score,
466
        assuming that the decrease is moved to one of the neighboring bins.
467

468
        Since each tile's contribution to the plan can, in general, span several
469
        LST bins and can change its area (exposure time) when its HA is
470
        adjusted, the assumptions of this algorithm are not valid in detail
471
        but it usually does a good job anyway.
472

473
        This algorithm has a stochastic component controlled by the `weights`
474
        parameter passed to our constructor, in order to avoid getting stuck
475
        in a local minimum.
476

477
        Returns
478
        -------
479
        tuple
480
            Tuple (idx, dha_sign) where idx is the LST bin index that should
481
            be decreased (by moving one of the tiles contributing to it) and
482
            dha_sign gives the sign +/-1 of the HA adjustment required.
483
        """
484
        # Rescale the available LST total time to the current plan total time.
485
        A = self.lst_hist * self.plan_hist.sum() / self.lst_hist_sum
1✔
486
        # Calculate residuals in each LST bin.
487
        P = self.plan_hist
1✔
488
        res = P - A
1✔
489
        # Calculate the change from moving tiles between adjacent bins.
490
        nbins = len(self.lst_hist)
1✔
491
        adjacent = np.roll(np.arange(nbins), -1)
1✔
492
        dres = res[adjacent] - res
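        # dres[k] = res[k+1] - res[k] (cyclically): a large positive value means
        # bin k+1 has more excess usage than bin k, so time should move out of
        # bin k+1 into bin k; a large negative value means the opposite.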
1✔
493
        # Cannot move tiles from an empty bin.
494
        empty = (P == 0)
1✔
495
        dres[empty & (dres < 0)] = 0.
1✔
496
        dres[empty[adjacent] & (dres > 0)] = 0.
1✔
497
        # Select the movements that reduce the RMSE between A and P by
498
        # the largest amounts.
499
        order = np.argsort(np.abs(dres))[::-1][:len(self.cum_weights)]
1✔
500
        # Randomly select one of these movements, according to our weights.
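        # For example, with the default weights [5, 4, 3, 2, 1] the cumulative
        # weights are [1/3, 3/5, 4/5, 14/15, 1], so the top-ranked movement is
        # picked with probability 5/15, the next with 4/15, and so on.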
501
        which = np.searchsorted(self.cum_weights, self.gen.uniform())
1✔
502
        idx = order[which]
1✔
503
        if dres[idx] == 0:
1✔
504
            raise RuntimeError('Cannot improve RMSE.')
×
505
        elif dres[idx] > 0:
1✔
506
            idx = adjacent[idx]
1✔
507
            dha_sign = -1
1✔
508
        else:
509
            dha_sign = +1
1✔
510
        return idx, dha_sign
1✔
511

512
    def improve(self, frac=1.):
1✔
513
        """Perform one iteration of improving the hour angle assignments.
514

515
        Each call will adjust the HA of a single tile with a magnitude \|dHA\|
516
        specified by the `frac` parameter.
517

518
        Parameters
519
        ----------
520
        frac : float
521
            Mean fraction of an LST bin to adjust the selected tile's HA by.
522
            Actual HA adjustments are randomly distributed around this mean
523
            to smooth out adjustments.
524
        """
525
        # Randomly perturb the size of the HA adjustment.  This adds some
526
        # noise but also makes it possible to get out of dead ends.
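        # A Rayleigh distribution with scale sigma has mean sigma * sqrt(pi/2),
        # so using scale = sqrt(2/pi) * frac makes the mean perturbed step equal
        # to the requested frac (before the clip at 2 bins).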
527
        frac = np.minimum(2., self.gen.rayleigh(
1✔
528
            scale=np.sqrt(2 / np.pi) * frac))
529
        # Calculate the initial score.
530
        initial_score = self.eval_score(self.plan_hist)
1✔
531
        # Try a fast method first, then fall back to a slower method.
532
        for method in 'next_bin', 'any_bin':
1✔
533
            if method == 'next_bin':
1✔
534
                # Select which bin to move a tile from and in which direction.
535
                ibin, dha_sign = self.next_bin()
1✔
536
                # Find tiles using more time in this LST bin than in the
537
                # adjacent bin they would be moving away from.
538
                ibin_from = (ibin - dha_sign + self.nbins) % self.nbins
1✔
539
                sel = (self.plan_tiles[:, ibin] > 0) & (
1✔
540
                    self.plan_tiles[:, ibin] >= self.plan_tiles[:, ibin_from])
541
            else:
542
                # Try a 20% random subset of all tiles.
543
                self.nslow += 1
×
544
                sel = self.gen.permutation(self.ntiles) < (self.ntiles // 5)
×
545
                # Randomly select a direction to shift the next tile.
546
                dha_sign = +1 if self.gen.uniform() > 0.5 else -1
×
547

548
            # Do not move any tiles that are already at their |HA| limits.
549
            dha = 360. / self.nbins * frac * dha_sign
1✔
550
            veto = np.abs(self.ha + dha) >= self.max_abs_ha
1✔
551
            sel[veto] = False
1✔
552
            # Are there any tiles available to adjust?
553
            nsel = np.count_nonzero(sel)
1✔
554
            if nsel == 0:
1✔
555
                self.log.debug('No tiles available for {0} method.'
×
556
                               .format(method))
557
                continue
×
558
            # How many times have these tiles already been adjusted?
559
            nadj = self.num_adjustments[sel]
1✔
560
            if np.min(nadj) < np.max(nadj):
1✔
561
                # Do not adjust a tile that already has received the max
562
                # number of adjustments.  This has the effect of smoothing
563
                # the spatial distribution of HA adjustments.
564
                sel = sel & (self.num_adjustments < np.max(nadj))
1✔
565
                nsel = np.count_nonzero(sel)
1✔
566
            subset = np.where(sel)[0]
1✔
567
            # Calculate how the plan changes by moving each selected tile.
568
            scenario = self.get_plan(self.ha[subset] + dha, subset)
1✔
569
            new_score = np.zeros(nsel)
1✔
570
            for i, itile in enumerate(subset):
1✔
571
                # Calculate the (downsampled) plan when this tile is moved.
572
                new_plan_hist = (
1✔
573
                    self.plan_hist - self.plan_tiles[itile] + scenario[i])
574
                new_score[i] = self.eval_score(new_plan_hist)
1✔
575
            i = np.argmin(new_score)
1✔
576
            if new_score[i] > initial_score:
1✔
577
                # All candidate adjustments give a worse score.
578
                continue
×
579
            # Accept the tile that gives the smallest score.
580
            itile = subset[i]
1✔
581
            self.num_adjustments[itile] += 1
1✔
582
            # Update the plan.
583
            self.ha[itile] = self.ha[itile] + dha
1✔
584
            assert np.abs(self.ha[itile]) < self.max_abs_ha[itile]
1✔
585
            self.plan_tiles[itile] = scenario[i]
1✔
586
            # No need to try additional methods.
587
            break
1✔
588
        self.use_plan()
1✔
589
        self.nimprove += 1
1✔
590

591
    def init_smoothing(self, radius):
1✔
592
        """Calculate and save smoothing weights.
593

594
        Weights for each pair of tiles [i,j] are calculated as::
595

596
            wgt[i,j] = exp(-0.5 * (sep[i,j]/radius) ** 2)
597

598
        where sep[i,j] is the separation angle between the tile centers.
599

600
        Parameters
601
        ----------
602
        radius : astropy.units.Quantity
603
            Gaussian sigma for calculating weights with angular units.
604
        """
605
        separations = desisurvey.utils.separation_matrix(
1✔
606
            self.ra, self.dec, self.ra, self.dec)
607
        ratio = separations / radius.to(u.deg).value
1✔
608
        self.smoothing_weights = np.exp(-0.5 * ratio ** 2)
1✔
609
        # Set self weight to zero.
610
        self_weights = np.diag(self.smoothing_weights)
1✔
611
        assert np.allclose(self_weights, 1.)
1✔
612
        self.smoothing_weights -= np.diag(self_weights)
1✔
613
        self.smoothing_sums = self.smoothing_weights.sum(axis=1)
1✔
614

615
    def smooth(self, alpha=0.1):
1✔
616
        """Smooth the current HA assignments.
617

618
        Each HA is replaced with a smoothed value::
619

620
            (1-alpha) * HA + alpha * HA[avg]
621

622
        where HA[avg] is the weighted average of all other tile HA assignments.
623
        """
624
        avg_ha = self.smoothing_weights.dot(self.ha) / self.smoothing_sums
1✔
625
        self.ha = (1 - alpha) * self.ha + alpha * avg_ha
1✔
626
        self.plan_tiles = self.get_plan(self.ha)
1✔
627
        self.use_plan()
1✔
628

629
    def plot(self, save=None, relative=True):
1✔
630
        """Plot the current optimzation status.
631

632
        Requires that matplotlib is installed.
633

634
        Parameters
635
        ----------
636
        save : str or None
637
            Filename where the generated plot will be saved, or display the
            plot interactively when None.
        relative : bool
            When True, color the tile map by each tile's HA adjustment relative
            to its initial value; otherwise color by the absolute HA value.
638
        """
639
        import matplotlib.pyplot as plt
×
640
        fig, ax = plt.subplots(2, 2, figsize=(12, 8))
×
641
        ax = ax.flatten()
×
642
        scale = self.scale_history[-1]
×
643
        ax[0].hist(self.lst_centers, bins=self.lst_edges,
×
644
                 weights=self.lst_hist * scale, histtype='stepfilled',
645
                 fc=(1,0,0,0.25), ec='r')
646
        ax[0].hist(self.lst_centers, bins=self.lst_edges,
×
647
                 weights=self.plan_hist, histtype='stepfilled',
648
                 fc=(0.7,0.7,1), ec='b')
649
        RMSE_scale = self.plan_hist.sum() / self.lst_hist_sum
×
650
        ax[0].hist(self.lst_centers, bins=self.lst_edges,
×
651
                 weights=self.lst_hist * RMSE_scale, histtype='step',
652
                 fc=(1,0,0,0.25), ec='r', ls='--')
653

654
        nbins = len(self.lst_centers)
×
655
        idx, dha = self.next_bin()
×
656
        ax[0].axvline(self.lst_centers[idx], c='b', ls='-')
×
657
        ax[0].axvline(
×
658
            self.lst_centers[(idx + nbins + dha) % nbins], c='b', ls='--')
659

660
        ax[0].set_xlim(self.lst_edges[0], self.lst_edges[-1])
×
661
        ax[0].set_ylim(0, None)
×
662
        ax[0].set_xlabel('Local Sidereal Time [deg]')
×
663
        ax[0].set_ylabel('Hours / LST Bin')
×
664

665
        ax[1].plot(self.RMSE_history, 'r.', ms=10, label='RMSE')
×
666
        ax[1].legend(loc='lower left', numpoints=1)
×
667
        rhs = ax[1].twinx()
×
668
        rhs.plot(self.scale_history, 'kx', ms=5, label='Scale')
×
669
        #rhs.plot(self.loss_history, 'g+', ms=5, label='Loss')
670
        rhs.legend(loc='upper right', numpoints=1)
×
671
        ax[1].set_xlim(-0.1, len(self.RMSE_history) - 0.9)
×
672
        ax[1].set_xlabel('Iterations')
×
673

674
        ax[2].hist(self.ha, bins=50, histtype='stepfilled')
×
675
        ax[2].set_xlabel('Tile Design Hour Angle [deg]')
×
676

677
        if relative:
×
678
            c = self.ha - self.ha_initial
×
679
            clabel = 'Tile Hour Angle Adjustments [deg]'
×
680
        else:
681
            c = self.ha
×
682
            clabel = 'Tile Hour Angle [deg]'
×
683
        s = ax[3].scatter(self.ra, self.dec, c=c, s=12, lw=0, cmap='jet')
×
684
        ax[3].set_xlim(self.lst_edges[0] - 5, self.lst_edges[-1] + 5)
×
685
        ax[3].set_xticks([])
×
686
        ax[3].set_xlabel(clabel)
×
687
        ax[3].set_ylim(-20, 80)
×
688
        ax[3].set_yticks([])
×
689
        cbar = plt.colorbar(s, ax=ax[3], orientation='vertical',
×
690
                            fraction=0.05, pad=0.01, format='%.1f')
691
        cbar.ax.tick_params(labelsize=9)
×
692
        plt.tight_layout()
×
693
        if save:
×
694
            plt.savefig(save)
×
695
        else:
696
            plt.show()
×
697

698

699
if __name__ == '__main__':
    """This should eventually be made into a first-class script entry point.
    """
    # Minimal smoke test with placeholder inputs: a uniform distribution of
    # available LST over 24 bins.  Real LST edges and histograms come from the
    # survey planning code.
    lst_edges = np.linspace(0., 360., 25)
    lst_hist = np.full(24, 100.)  # assumed: 100 sidereal hours per bin
    opt = Optimizer('GRAY', lst_edges, lst_hist)
    for i in range(10):
        opt.improve(frac=0.25)