desy-multimessenger / nuztf, build 3930936208 (pending completion)

Pull Request #214: Make GCN draft even if info is missing
github-actions (GitHub): Merge 300ea18d8 into 8aa7afee4

21 of 21 new or added lines in 1 file covered (100.0%).
1853 of 2248 relevant lines covered (82.43%).
0.82 hits per line.

Source File

/nuztf/base_scanner.py (78.32% of lines covered)

#!/usr/bin/env python3
# coding: utf-8

import os
import logging
import pickle
import backoff
import requests
import pandas
import healpy as hp
import numpy as np
from tqdm import tqdm
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
from matplotlib.backends.backend_pdf import PdfPages
from astropy.time import Time
from astropy import units as u
from astropy.coordinates import SkyCoord, Distance
from astropy.cosmology import FlatLambdaCDM
from ztfquery import fields as ztfquery_fields
from gwemopt.ztf_tiling import get_quadrant_ipix
from ampel.ztf.t0.DecentFilter import DecentFilter
from ampel.ztf.dev.DevAlertConsumer import DevAlertConsumer
from ampel.ztf.alert.ZiAlertSupplier import ZiAlertSupplier
from ampel.ztf.dev.ZTFAlert import ZTFAlert

from nuztf.ampel_api import (
    ampel_api_cone,
    ampel_api_timerange,
    ampel_api_name,
    ampel_api_lightcurve,
    ampel_api_skymap,
    ensure_cutouts,
)
from nuztf.cat_match import get_cross_match_info, ampel_api_tns, query_ned_for_z
from nuztf.observations import get_obs_summary
from nuztf.plot import lightcurve_from_alert
from nuztf.utils import cosmo
from nuztf.fritz import save_source_to_group
from nuztf.flatpix import get_flatpix

DEBUG = False
RATELIMIT_CALLS = 10
RATELIMIT_PERIOD = 1


class BaseScanner:

    default_fritz_group = 1430

    def __init__(
        self,
        run_config,
        t_min,
        resource=None,
        filter_class=DecentFilter,
        cone_nside=64,
        cones_to_scan=None,
        logger=None,
    ):
        self.cone_nside = cone_nside
        self.t_min = t_min
        (
            self.map_coords,
            self.pixel_nos,
            self.nside,
            self.map_probs,
            self.data,
            self.pixel_area,
            self.key,
        ) = self.unpack_skymap()

        if not hasattr(self, "prob_threshold"):
            self.prob_threshold = None

        if resource is None:
            resource = {
                "ampel-ztf/catalogmatch": "https://ampel.zeuthen.desy.de/api/catalogmatch/",
            }

        if logger is None:
            self.logger = logging.getLogger(__name__)
        else:
            self.logger = logger

        self.logger.info("AMPEL run config:")
        self.logger.info(run_config)

        lvl = self.logger.level

        if lvl > 10:
            logger_ampel = logging.getLogger("AMPEL_filter")
            logger_ampel.setLevel(logging.WARNING)
        else:
            from ampel.log.AmpelLogger import AmpelLogger

            logger_ampel = AmpelLogger()

        self.ampel_filter_class = filter_class(
            logger=logger_ampel, resource=resource, **run_config
        )
        self.ampel_filter_class.post_init()

        self.dac = DevAlertConsumer(self.ampel_filter_class)

        self.scanned_pixels = []

        if cones_to_scan is None:
            self.cone_ids, self.cone_coords = self.find_cone_coords()
        else:
            self.cone_ids, self.cone_coords = cones_to_scan

        self.cache = dict()
        self.default_t_max = t_min + 10.0

        self.overlap_prob = None
        self.overlap_fields = None
        self.first_obs = None
        self.last_obs = None
        self.n_fields = None
        self.rectangular_area = None
        self.double_extragalactic_area = None

        if not hasattr(self, "dist"):
            self.dist = None

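    # BaseScanner is abstract: concrete scanners are expected to implement
    # unpack_skymap(), find_cone_coords(), the filter_f_* stages and the
    # naming/report helpers below that raise NotImplementedError.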
    def get_name(self):
        raise NotImplementedError

    def get_full_name(self):
        raise NotImplementedError

    def unpack_skymap(self):
        raise NotImplementedError

    @staticmethod
    def get_tiling_line():
        return ""

    @staticmethod
    def get_obs_line():
        raise NotImplementedError

    @staticmethod
    def remove_variability_line():
        raise NotImplementedError

    @staticmethod
    def fid_to_band(fid: int):
        fid_band = {1: "g", 2: "r", 3: "i"}
        return fid_band[fid]

    def get_overlap_line(self):
        """Return a GCN sentence summarising the observed probability."""
        if (self.overlap_prob is not None) and (
            self.double_extragalactic_area is not None
        ):
            return (
                f"We covered {self.overlap_prob:.1f}% ({self.double_extragalactic_area:.1f} sq deg) "
                f"of the reported localization region. "
                "This estimate accounts for chip gaps. "
            )
        else:
            self.logger.warning("No overlap line added!")
            return ""

    def filter_ampel(self, res):
        self.logger.debug("Running AMPEL filter")

        shaped_alert = ZiAlertSupplier.shape_alert_dict(res, ["FilterTest"])
        filterres = self.ampel_filter_class.process(alert=shaped_alert)
        if filterres:
            return True
        else:
            return False

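    # filter_ampel() above feeds a single alert through the configured AMPEL
    # T0 filter (DecentFilter by default); a truthy result means the alert
    # survives the cut.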
    @backoff.on_exception(
        backoff.expo,
        requests.exceptions.RequestException,
        max_time=600,
    )
    def add_res_to_cache(self, res):
        for res_alert in res:
            if res_alert["objectId"] not in self.cache.keys():
                self.cache[res_alert["objectId"]] = res_alert
            elif (
                res_alert["candidate"]["jd"]
                > self.cache[res_alert["objectId"]]["candidate"]["jd"]
            ):
                self.cache[res_alert["objectId"]] = res_alert

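    # add_res_to_cache() above retries with exponential backoff on network
    # errors (for up to 600 s), so transient API outages do not abort a scan.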
    def add_to_cache_by_names(self, *args):
        for ztf_name in args:
            query_res = ampel_api_name(ztf_name, logger=self.logger)
            self.add_res_to_cache(query_res)

    def check_ampel_filter(self, ztf_name):
        lvl = logging.getLogger().getEffectiveLevel()
        logging.getLogger().setLevel(logging.DEBUG)
        self.logger.info("Set logger level to DEBUG")
        all_query_res = ampel_api_name(ztf_name, logger=self.logger)
        pipeline_bool = False
        for query_res in all_query_res:
            self.logger.info("Checking filter f (no prv)")
            no_prv_bool = self.filter_f_no_prv(query_res)
            self.logger.info(f"Filter f (no prv): {no_prv_bool}")
            if no_prv_bool:
                self.logger.info("Checking ampel filter")
                bool_ampel = self.filter_ampel(query_res)
                self.logger.info(f"ampel filter: {bool_ampel}")
                if bool_ampel:
                    self.logger.info("Checking filter f (history)")
                    history_bool = self.filter_f_history(query_res)
                    self.logger.info(f"Filter f (history): {history_bool}")
                    if history_bool:
                        pipeline_bool = True
        self.logger.info(f"Setting logger back to {lvl}")
        logging.getLogger().setLevel(lvl)
        return pipeline_bool

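    # Hypothetical usage sketch for check_ampel_filter() above, assuming a
    # concrete subclass (class and object names below are illustrative only):
    #
    #     scanner = SomeConcreteScanner(run_config, t_min)
    #     passed = scanner.check_ampel_filter("ZTF19abcdefg")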
    def plot_ztf_observations(self, **kwargs):
        self.get_multi_night_summary().show_gri_fields(**kwargs)

    def get_multi_night_summary(self, max_days=None):
        return get_obs_summary(self.t_min, max_days=max_days)

    def query_ampel(
        self,
        t_min=None,
        t_max=None,
    ):
        if t_max is None:
            t_max = self.default_t_max

        if t_min is None:
            t_min = self.t_min

        self.logger.info("Commencing skymap scan")

        self.logger.debug(
            f"API skymap search: nside = {self.cone_nside} / # pixels = {len(self.cone_ids)} / timespan = {t_max.jd - t_min.jd:.1f} days."
        )

        query_res = []

        resume = True
        chunk_size = 8000
        resume_token = None

        while resume:
            res, resume_token = ampel_api_skymap(
                pixels=self.cone_ids,
                nside=self.cone_nside,
                t_min_jd=t_min.jd,
                t_max_jd=t_max.jd,
                logger=self.logger,
                chunk_size=chunk_size,
                resume_token=resume_token,
                warn_exceeding_chunk=False,
            )
            query_res.extend(res)

            if len(res) < chunk_size:
                resume = False
                self.logger.info("Done.")
            else:
                self.logger.info(
                    f"Chunk size reached ({chunk_size}), commencing next query."
                )

        self.logger.info(
            f"Before filtering: Found {len(query_res)} candidates. Commencing filtering now."
        )
        return query_res

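    # Pagination pattern used by query_ampel() above: ampel_api_skymap()
    # returns at most chunk_size alerts per call plus a resume_token, and the
    # loop keeps querying until a chunk comes back smaller than chunk_size.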
    def scan_area(
        self,
        t_min=None,
        t_max=None,
    ):
        """
        Retrieve alerts for the healpix map from the AMPEL API,
        filter the candidates and create a summary.
        """
        query_res = self.query_ampel(t_min=t_min, t_max=t_max)

        ztf_ids_first_stage = []
        for res in tqdm(query_res):
            if self.filter_f_no_prv(res):
                if self.filter_ampel(res):
                    ztf_ids_first_stage.append(res["objectId"])

        ztf_ids_first_stage = list(set(ztf_ids_first_stage))

        self.logger.info(f"{len(ztf_ids_first_stage)} candidates survive filtering")

        self.logger.info("Retrieving alert history from AMPEL")

        results = self.ampel_object_search(ztf_ids=ztf_ids_first_stage)

        for res in results:
            self.add_res_to_cache(res)

        self.logger.info(f"Found {len(self.cache)} candidates")

        self.create_candidate_summary()

    def filter_f_no_prv(self, res):
        raise NotImplementedError

    def fast_filter_f_no_prv(self, res):
        return self.filter_f_no_prv(res)

    def filter_f_history(self, res):
        raise NotImplementedError

    def fast_filter_f_history(self, res):
        return self.filter_f_history(res)

    def find_cone_coords(self):
        raise NotImplementedError

    @staticmethod
    def wrap_around_180(ra_deg: np.ndarray):
        """Map RA values from [0, 360) deg to (-180, 180] deg."""
        ra_rad = np.deg2rad(ra_deg)
        ra_rad[ra_rad > np.pi] -= 2 * np.pi
        ra_deg = np.rad2deg(ra_rad)
        return ra_deg

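    # e.g. wrap_around_180(np.array([10.0, 350.0])) -> array([ 10., -10.]),
    # mapping RA from [0, 360) to (-180, 180] degrees for aitoff plotting.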
    def ampel_object_search(self, ztf_ids: list) -> list:
        """Fetch full lightcurves for the given ZTF IDs and apply the history filter."""
        all_results = []

        for ztf_id in tqdm(ztf_ids):
            # get the full lightcurve from the API
            query_res = ampel_api_lightcurve(ztf_name=ztf_id, logger=self.logger)

            final_res = []

            for res in query_res:
                if self.filter_f_history(res):
                    final_res.append(res)

            all_results.append(final_res)

        return all_results

    def calculate_abs_mag(self, mag: float, redshift: float) -> float:
        """Convert an apparent magnitude to an absolute magnitude at the given redshift."""
        luminosity_distance = cosmo.luminosity_distance(redshift).value * 10**6
        abs_mag = mag - 5 * (np.log10(luminosity_distance) - 1)

        return abs_mag

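    # calculate_abs_mag() applies the standard distance modulus
    # M = m - 5 * (log10(d_pc) - 1); the 10**6 factor converts astropy's
    # luminosity distance from Mpc to pc. For example, m = 19.0 at
    # d_L = 10 Mpc gives M = 19.0 - 30.0 = -11.0.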
    def get_candidates_lines(self):
        if len(self.cache) > 0:
            s = (
                "We are left with the following high-significance transient "
                "candidates, selected by our pipeline, all lying within the "
                f"{100 * self.prob_threshold}% localization of the skymap.\n\n{self.parse_candidates()}"
            )
        else:
            s = "\n\nNo candidate counterparts were detected."

        return s

    def parse_candidates(self):
        table = (
            "+--------------------------------------------------------------------------------+\n"
            "| ZTF Name     | IAU Name  | RA (deg)    | DEC (deg)   | Filter | Mag   | MagErr |\n"
            "+--------------------------------------------------------------------------------+\n"
        )
        for name, res in sorted(self.cache.items()):
            jds = [x["jd"] for x in res["prv_candidates"]]

            if res["candidate"]["jd"] > max(jds):
                latest = res["candidate"]
            else:
                latest = res["prv_candidates"][jds.index(max(jds))]

            old_flag = ""

            second_det = [x for x in jds if x > min(jds) + 0.01]

            if len(second_det) > 0:
                if Time.now().jd - second_det[0] > 1.0:
                    old_flag = "(MORE THAN ONE DAY SINCE SECOND DETECTION)"

            tns_result = " ------- "
            tns_name, tns_date, tns_group = ampel_api_tns(
                latest["ra"], latest["dec"], searchradius_arcsec=3
            )
            if tns_name:
                tns_result = tns_name

            line = "| {0} | {1} | {2:011.7f} | {3:+011.7f} | {4}      | {5:.2f} | {6:.2f}   | {7} \n".format(
                name,
                tns_result,
                float(latest["ra"]),
                float(latest["dec"]),
                ["g", "r", "i"][latest["fid"] - 1],
                latest["magpsf"],
                latest["sigmapsf"],
                old_flag,
            )
            table += line

        table += "+--------------------------------------------------------------------------------+\n\n"
        return table

    def draft_gcn(self):
        if self.first_obs is None:
            self.first_obs = Time(
                input(
                    "What was the first observation date? (YYYY-MM-DD HH:MM:SS [UTC])"
                )
            )

        first_obs_dt = self.first_obs.datetime
        pretty_date = first_obs_dt.strftime("%Y-%m-%d")
        pretty_time = first_obs_dt.strftime("%H:%M")

        text = (
            f"Astronomer Name (Institute of Somewhere), ............. report,\n\n"
            f"On behalf of the Zwicky Transient Facility (ZTF) and Global Relay of Observatories Watching Transients Happen (GROWTH) collaborations: \n\n"
            f"As part of the ZTF neutrino follow up program (Stein et al. 2022), we observed the localization region of the {self.get_full_name()} with the Palomar 48-inch telescope, equipped with the 47 square degree ZTF camera (Bellm et al. 2019, Graham et al. 2019). {self.get_tiling_line()}"
            f"We started observations in the g- and r-band beginning at {pretty_date} {pretty_time} UTC, "
            f"approximately {(self.first_obs.jd - self.t_min.jd) * 24.0:.1f} hours after event time. {self.get_overlap_line()}"
            f"{self.get_obs_line()} \n \n"
            "The images were processed in real-time through the ZTF reduction and image subtraction pipelines at IPAC to search for potential counterparts (Masci et al. 2019). "
            "AMPEL (Nordin et al. 2019, Stein et al. 2021) was used to search the alerts database for candidates. "
            "We reject stellar sources (Tachibana and Miller 2018) and moving objects, and "
            f"apply machine learning algorithms (Mahabal et al. 2019) {self.remove_variability_line()}. "
            f"{self.get_candidates_lines()} \n\n"
        )

        if self.dist:
            text += (
                "The GW distance estimate is {:.0f} [{:.0f} - {:.0f}] Mpc.\n\n".format(
                    self.dist, self.dist - self.dist_unc, self.dist + self.dist_unc
                )
            )

        text += self.text_summary()

        text += (
            "ZTF and GROWTH are worldwide collaborations comprising Caltech, USA; IPAC, USA; WIS, Israel; OKC, Sweden; JSI/UMd, USA; DESY, Germany; TANGO, Taiwan; UW Milwaukee, USA; LANL, USA; TCD, Ireland; IN2P3, France.\n\n"
            "GROWTH acknowledges generous support of the NSF under PIRE Grant No 1545949.\n"
            "Alert distribution service provided by DIRAC@UW (Patterson et al. 2019).\n"
            "Alert database searches are done by AMPEL (Nordin et al. 2019).\n"
            "Alert filtering is performed with the nuztf (Stein et al. 2021, https://github.com/desy-multimessenger/nuztf).\n"
        )
        if self.dist:
            text += "Alert filtering and follow-up coordination is being undertaken by the Fritz marshal system (FIXME CITATION NEEDED)."

        return text

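    # draft_gcn() above stays usable even when metadata is missing (cf. this
    # PR, "Make GCN draft even if info is missing"): it prompts for the first
    # observation date if none is set, and get_overlap_line() falls back to
    # an empty string instead of raising.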
    @staticmethod
    def extract_ra_dec(nside, index):
        """Convert a (nested) healpix index to an (ra, dec) pair in degrees."""
        theta, phi = hp.pix2ang(nside, index, nest=True)
        ra_deg = np.rad2deg(phi)
        dec_deg = np.rad2deg(0.5 * np.pi - theta)

        return (ra_deg, dec_deg)

    @staticmethod
    def extract_npix(nside, ra, dec):
        """Convert an (ra, dec) pair in degrees to a (nested) healpix index."""
        theta = 0.5 * np.pi - np.deg2rad(dec)
        phi = np.deg2rad(ra)

        return int(hp.ang2pix(nside, theta, phi, nest=True))

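    # Both helpers above use NESTED healpix ordering and the healpy convention
    # theta = pi/2 - dec, phi = ra (in radians), so extract_npix() inverts
    # extract_ra_dec() at pixel centres.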
    def create_candidate_summary(self, outfile=None):
        """Create a pdf with lightcurve plots of all candidates."""
        if outfile is None:
            pdf_path = self.summary_path + ".pdf"
        else:
            pdf_path = outfile

        self.logger.info(f"Saving lightcurves to: {pdf_path}")

        with PdfPages(pdf_path) as pdf:
            for name, alert in tqdm(sorted(self.cache.items())):
                fig, _ = lightcurve_from_alert(
                    [alert],
                    include_cutouts=True,
                    logger=self.logger,
                    t_0_mjd=self.t_min.mjd,
                )

                pdf.savefig()
                plt.close()

    def create_overview_table(self, outfile=None):
        """Create a csv table of all candidates."""
        if outfile is None:
            csv_path = self.summary_path + ".csv"
        else:
            csv_path = outfile

        self.logger.info(f"Saving overview table to {csv_path}")

        data = {"ztf_id": [], "RA": [], "Dec": [], "mag": [], "xmatch": []}

        for ztf_id, alert in tqdm(sorted(self.cache.items())):
            data["ztf_id"].append(ztf_id)
            data["RA"].append(alert["candidate"]["ra"])
            data["Dec"].append(alert["candidate"]["dec"])
            data["mag"].append(alert["candidate"]["magpsf"])
            data["xmatch"].append(get_cross_match_info(raw=alert, logger=self.logger))

        df = pandas.DataFrame.from_dict(data)

        df.to_csv(csv_path)

    @staticmethod
    def parse_ztf_filter(fid: int):
        """Convert a ZTF filter ID (1/2/3) to a band name (g/r/i)."""
        return ["g", "r", "i"][fid - 1]

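    # parse_ztf_filter() duplicates fid_to_band() above: ZTF alert packets
    # encode the filter as fid = 1, 2 or 3 for g, r and i respectively.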
    def tns_summary(self):
        """Summarise each cached candidate in a TNS-style report."""
        summary = ""

        for name, res in sorted(self.cache.items()):
            detections = [
                x
                for x in res["prv_candidates"] + [res["candidate"]]
                if "isdiffpos" in x.keys()
            ]
            detection_jds = [x["jd"] for x in detections]
            first_detection = detections[detection_jds.index(min(detection_jds))]
            latest = detections[-1]
            summary += f"Candidate: {name} / RA={res['candidate']['ra']} / Dec={res['candidate']['dec']} / First detection={first_detection['jd']}\n"
            try:
                last_upper_limit = [
                    x
                    for x in res["prv_candidates"]
                    if "isdiffpos" in x.keys() and x["jd"] < first_detection["jd"]
                ][-1]
                summary += f"Last Upper Limit: {last_upper_limit['jd']} / band={self.parse_ztf_filter(last_upper_limit['fid'])} / maglim={last_upper_limit['diffmaglim']:.3f}\n"

            except IndexError:
                last_upper_limit = None
                summary += "Last Upper Limit: None\n"

            summary += f"First Detection: {first_detection['jd']} / band={self.parse_ztf_filter(first_detection['fid'])} / mag={first_detection['magpsf']:.3f} +/- {first_detection['sigmapsf']:.3f}\n"

            hours_after_merger = 24.0 * (first_detection["jd"] - self.t_min.jd)

            summary += f"First observed {hours_after_merger:.2f} hours after merger\n"

            if last_upper_limit:
                summary += f"It has risen {last_upper_limit['diffmaglim'] - latest['magpsf']:.2f} mag / band={self.parse_ztf_filter(latest['fid'])} / Last upper limit was in band {self.parse_ztf_filter(last_upper_limit['fid'])}\n"

            summary += f"{[x['jd'] for x in res['prv_candidates'] + [res['candidate']] if 'isdiffpos' in x.keys()]}\n"

        self.logger.info(summary)

        return summary

    def peak_mag_summary(self):
        """Print a one-line peak-magnitude summary for each candidate."""
        for name, res in sorted(self.cache.items()):
            detections = [
                x
                for x in res["prv_candidates"] + [res["candidate"]]
                if "isdiffpos" in x.keys()
            ]
            detection_mags = [x["magpsf"] for x in detections]
            brightest = detections[detection_mags.index(min(detection_mags))]

            diff = 0.0
            diff_band = None

            for fid in [1, 2, 3]:
                dets = [x["magpsf"] for x in detections if int(x["fid"]) == fid]
                if len(dets) > 1:
                    nd = max(dets) - min(dets)

                    if nd > diff:
                        diff = nd
                        diff_band = self.parse_ztf_filter(fid)

            tns_result = ""
            tns_name, tns_date, tns_group = ampel_api_tns(
                brightest["ra"], brightest["dec"], searchradius_arcsec=3.0
            )
            if tns_name:
                tns_result = f"({tns_name}) "

            xmatch_info = get_cross_match_info(raw=res, logger=self.logger)

            print(
                f"Candidate {name} peaked at {brightest['magpsf']:.1f} {tns_result}on "
                f"{brightest['jd']:.1f} with filter {self.parse_ztf_filter(brightest['fid'])}. "
                f"Max range of {diff:.1f} mag with filter {diff_band}. {xmatch_info}"
            )

    def candidate_text(self, name, first_detection, lul_lim, lul_jd):
        raise NotImplementedError

    def text_summary(self):
        """Build the per-candidate paragraph for the GCN draft."""
        text = ""
        for name, res in sorted(self.cache.items()):
            detections = [
                x
                for x in res["prv_candidates"] + [res["candidate"]]
                if "isdiffpos" in x.keys()
            ]
            detection_jds = [x["jd"] for x in detections]
            first_detection = detections[detection_jds.index(min(detection_jds))]
            latest = detections[-1]
            try:
                last_upper_limit = [
                    x
                    for x in res["prv_candidates"]
                    if "isdiffpos" in x.keys() and x["jd"] < first_detection["jd"]
                ][-1]

                text += self.candidate_text(
                    name,
                    first_detection["jd"],
                    last_upper_limit["diffmaglim"],
                    last_upper_limit["jd"],
                )

            # No pre-detection upper limit
            except IndexError:
                text += self.candidate_text(name, first_detection["jd"], None, None)

            ned_z, ned_dist = query_ned_for_z(
                ra_deg=latest["ra"],
                dec_deg=latest["dec"],
                searchradius_arcsec=1,
                logger=self.logger,
            )

            if ned_z:
                ned_z = float(ned_z)
                absmag = self.calculate_abs_mag(latest["magpsf"], ned_z)
                if ned_z > 0:
                    z_dist = Distance(z=ned_z, cosmology=cosmo).value
                    text += f"It has a spec-z of {ned_z:.3f} [{z_dist:.0f} Mpc] and an abs. mag of {absmag:.1f}. Distance to NED galaxy is {ned_dist:.2f} arcsec. "
                    if self.dist:
                        # NOTE: currently unused
                        gw_dist_interval = [
                            self.dist - self.dist_unc,
                            self.dist + self.dist_unc,
                        ]

            c = SkyCoord(res["candidate"]["ra"], res["candidate"]["dec"], unit="deg")
            g_lat = c.galactic.b.degree
            if abs(g_lat) < 15.0:
                text += f"It is located at a galactic latitude of {g_lat:.2f} degrees. "

            xmatch_info = get_cross_match_info(raw=res, logger=self.logger)
            text += xmatch_info
            text += "\n"

        if len(text) > 0:
            text = f"Amongst our candidates, \n\n{text}\n\n"

        return text

    def calculate_overlap_with_observations(
        self, fields=None, pid=None, first_det_window_days=3.0, min_sep=0.01
    ):
        if fields is None:
            mns = self.get_multi_night_summary(first_det_window_days)

        else:

            class MNS:
                def __init__(self, data):
                    self.data = pandas.DataFrame(
                        data, columns=["field", "ra", "dec", "datetime"]
                    )

            data = []

            for f in fields:
                ra, dec = ztfquery_fields.get_field_centroid(f)[0]
                for i in range(2):
                    t = Time(self.t_min.jd + 0.1 * i, format="jd").utc
                    t.format = "isot"
                    t = t.value
                    data.append([f, ra, dec, t])

            mns = MNS(data)

        # Skip all 64 simultaneous quadrant entries, we only need one per observation for qa log
        # data = mns.data.copy().iloc[::64]

        data = mns.data.copy()

        ras = np.ones_like(data["field"]) * np.nan
        decs = np.ones_like(data["field"]) * np.nan

        # Actually load up ra/dec

        veto_fields = []

        for field in list(set(data["field"])):
            mask = data["field"] == field

            res = ztfquery_fields.get_field_centroid(field)

            if len(res) > 0:
                ras[mask] = res[0][0]
                decs[mask] = res[0][1]

            else:
                veto_fields.append(field)

        if len(veto_fields) > 0:
            self.logger.info(
                f"No RA/Dec found by ztfquery for fields {veto_fields}. "
                "These observations have to be ignored."
            )

        data["ra"] = ras
        data["dec"] = decs

        mask = np.array([~np.isnan(x) for x in data["ra"]])

        data = data[mask]

        if pid is not None:
            pid_mask = data["pid"] == str(pid)
            data = data[pid_mask]

        obs_times = np.array(
            [
                Time(
                    data["datetime"].iat[i].replace(" ", "T"),
                    format="isot",
                    scale="utc",
                )
                for i in range(len(data))
            ]
        )

        if first_det_window_days is not None:
            first_det_mask = [
                x < Time(self.t_min.jd + first_det_window_days, format="jd").utc
                for x in obs_times
            ]
            data = data[first_det_mask]
            obs_times = obs_times[first_det_mask]

        if len(obs_times) == 0:
            self.logger.warning("No observations found for this event.")
            return None

        self.logger.info(f"Most recent observation found is {obs_times[-1]}")
        self.logger.info("Unpacking observations")

        pix_map = dict()
        pix_obs_times = dict()

        field_pix = get_flatpix(nside=self.nside, logger=self.logger)

        for i, obs_time in enumerate(tqdm(obs_times)):
            field = data["field"].iat[i]

            flat_pix = field_pix[field]

            t = obs_time.jd

            for p in flat_pix:
                if p not in pix_obs_times.keys():
                    pix_obs_times[p] = [t]
                else:
                    pix_obs_times[p] += [t]

                if p not in pix_map.keys():
                    pix_map[p] = [field]
                else:
                    pix_map[p] += [field]

        npix = hp.nside2npix(self.nside)
        theta, phi = hp.pix2ang(self.nside, np.arange(npix), nest=False)
        radecs = SkyCoord(ra=phi * u.rad, dec=(0.5 * np.pi - theta) * u.rad)
        idx = np.where(np.abs(radecs.galactic.b.deg) <= 10.0)[0]

        double_in_plane_pixels = []
        double_in_plane_probs = []
        single_in_plane_pixels = []
        single_in_plane_prob = []
        veto_pixels = []
        plane_pixels = []
        plane_probs = []
        times = []
        double_no_plane_prob = []
        double_no_plane_pixels = []
        single_no_plane_prob = []
        single_no_plane_pixels = []

        overlapping_fields = []

        for i, p in enumerate(tqdm(hp.nest2ring(self.nside, self.pixel_nos))):
            if p in pix_obs_times.keys():
                if p in idx:
                    plane_pixels.append(p)
                    plane_probs.append(self.map_probs[i])

                obs = pix_obs_times[p]

                # check which healpix pixels are observed at least twice
                if max(obs) - min(obs) > min_sep:
                    # is it in the galactic plane or not?
                    if p not in idx:
                        double_no_plane_prob.append(self.map_probs[i])
                        double_no_plane_pixels.append(p)
                    else:
                        double_in_plane_probs.append(self.map_probs[i])
                        double_in_plane_pixels.append(p)

                else:
                    if p not in idx:
                        single_no_plane_pixels.append(p)
                        single_no_plane_prob.append(self.map_probs[i])
                    else:
                        single_in_plane_prob.append(self.map_probs[i])
                        single_in_plane_pixels.append(p)

                overlapping_fields += pix_map[p]

                times += list(obs)
            else:
                veto_pixels.append(p)

        overlapping_fields = sorted(list(set(overlapping_fields)))

        # NOTE: assumes the observation summary provides an "obsjd" column
        _observations = data.query("obsjd in @times").reset_index(drop=True)[
            ["datetime", "exptime", "fid", "field"]
        ]
        bands = [self.fid_to_band(fid) for fid in _observations["fid"].values]
        _observations["band"] = bands
        _observations.drop(columns=["fid"], inplace=True)
        self.observations = _observations

        self.logger.info("All observations:")
        self.logger.info(f"\n{self.observations}")

        try:
            self.first_obs = Time(min(times), format="jd")
            self.first_obs.utc.format = "isot"
            self.last_obs = Time(max(times), format="jd")
            self.last_obs.utc.format = "isot"

        except ValueError:
            err = (
                f"No observations of this field were found at any time between {self.t_min} and "
                f"{obs_times[-1]}. Coverage overlap is 0%, but recent observations might be missing!"
            )
            self.logger.error(err)
            raise ValueError(err)

        self.logger.info(f"Observations started at {self.first_obs.jd}")

        return (
            double_in_plane_pixels,
            double_in_plane_probs,
            single_in_plane_pixels,
            single_in_plane_prob,
            veto_pixels,
            plane_pixels,
            plane_probs,
            times,
            double_no_plane_prob,
            double_no_plane_pixels,
            single_no_plane_prob,
            single_no_plane_pixels,
            overlapping_fields,
        )

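    # The 13-tuple returned above is unpacked by plot_overlap_with_observations()
    # below. The *_prob lists hold per-pixel skymap probabilities, so e.g.
    # np.sum(double_in_plane_probs) + np.sum(double_no_plane_prob) is the
    # fraction of total probability observed at least twice.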
    def plot_overlap_with_observations(
        self, fields=None, pid=None, first_det_window_days=None, min_sep=0.01
    ):
        """Plot the skymap contour and its overlap with ZTF observations."""
        overlap_res = self.calculate_overlap_with_observations(
            fields=fields,
            pid=pid,
            first_det_window_days=first_det_window_days,
            min_sep=min_sep,
        )

        if overlap_res is None:
            self.logger.warning("Not plotting overlap with observations.")
            return
        else:
            (
                double_in_plane_pixels,
                double_in_plane_probs,
                single_in_plane_pixels,
                single_in_plane_prob,
                veto_pixels,
                plane_pixels,
                plane_probs,
                times,
                double_no_plane_prob,
                double_no_plane_pixels,
                single_no_plane_prob,
                single_no_plane_pixels,
                overlapping_fields,
            ) = overlap_res

        fig = plt.figure()
        plt.subplot(projection="aitoff")

        self.overlap_fields = list(set(overlapping_fields))

        self.overlap_prob = np.sum(double_in_plane_probs + double_no_plane_prob) * 100.0

        size = hp.max_pixrad(self.nside) ** 2 * 50.0

        veto_pos = np.array(
            [hp.pixelfunc.pix2ang(self.nside, i, lonlat=True) for i in veto_pixels]
        ).T

        if len(veto_pos) > 0:
            plt.scatter(
                np.radians(self.wrap_around_180(veto_pos[0])),
                np.radians(veto_pos[1]),
                color="red",
                s=size,
            )

        plane_pos = np.array(
            [hp.pixelfunc.pix2ang(self.nside, i, lonlat=True) for i in plane_pixels]
        ).T

        if len(plane_pos) > 0:
            plt.scatter(
                np.radians(self.wrap_around_180(plane_pos[0])),
                np.radians(plane_pos[1]),
                color="green",
                s=size,
            )

        single_pos = np.array(
            [
                hp.pixelfunc.pix2ang(self.nside, i, lonlat=True)
                for i in single_no_plane_pixels
            ]
        ).T

        if len(single_pos) > 0:
            plt.scatter(
                np.radians(self.wrap_around_180(single_pos[0])),
                np.radians(single_pos[1]),
                c=single_no_plane_prob,
                vmin=0.0,
                vmax=max(self.data[self.key]),
                s=size,
                cmap="gray",
            )

        plot_pos = np.array(
            [
                hp.pixelfunc.pix2ang(self.nside, i, lonlat=True)
                for i in double_no_plane_pixels
            ]
        ).T

        if len(plot_pos) > 0:
            plt.scatter(
                np.radians(self.wrap_around_180(plot_pos[0])),
                np.radians(plot_pos[1]),
                c=double_no_plane_prob,
                vmin=0.0,
                vmax=max(self.data[self.key]),
                s=size,
            )

        red_patch = mpatches.Patch(color="red", label="Not observed")
        gray_patch = mpatches.Patch(color="gray", label="Observed once")
        green_patch = mpatches.Patch(
            color="green", label="Observed Galactic Plane (|b|<10)"
        )
        plt.legend(handles=[red_patch, gray_patch, green_patch])

        message = (
            "In total, {0:.2f} % of the contour was observed at least once.\n"
            "This estimate includes {1:.2f} % of the contour "
            "at a galactic latitude <10 deg.\n"
            "In total, {2:.2f} % of the contour was observed at least twice. \n"
            "In total, {3:.2f} % of the contour was observed at least twice, "
            "and excluding low galactic latitudes.\n"
            "These estimates account for chip gaps.".format(
                100
                * (
                    np.sum(double_in_plane_probs)
                    + np.sum(single_in_plane_prob)
                    + np.sum(single_no_plane_prob)
                    + np.sum(double_no_plane_prob)
                ),
                100 * np.sum(plane_probs),
                100.0 * (np.sum(double_in_plane_probs) + np.sum(double_no_plane_prob)),
                100.0 * np.sum(double_no_plane_prob),
            )
        )

        n_pixels = len(
            single_in_plane_pixels
            + double_in_plane_pixels
            + double_no_plane_pixels
            + single_no_plane_pixels
        )
        n_double = len(double_no_plane_pixels + double_in_plane_pixels)
        n_plane = len(plane_pixels)

        self.healpix_area = (
            hp.pixelfunc.nside2pixarea(self.nside, degrees=True) * n_pixels
        )
        self.double_extragalactic_area = (
            hp.pixelfunc.nside2pixarea(self.nside, degrees=True) * n_double
        )
        plane_area = hp.pixelfunc.nside2pixarea(self.nside, degrees=True) * n_plane

        self.overlap_fields = overlapping_fields

        #     area = (2. * base_ztf_rad)**2 * float(len(overlapping_fields))
        #     n_fields = len(overlapping_fields)

        self.logger.info(
            f"{n_pixels} pixels were covered, covering approximately {self.healpix_area:.2g} sq deg."
        )
        self.logger.info(
            f"{n_double} pixels were covered at least twice (b>10), covering approximately {self.double_extragalactic_area:.2g} sq deg."
        )
        self.logger.info(
            f"{n_plane} pixels were covered at low galactic latitude, covering approximately {plane_area:.2g} sq deg."
        )
        return fig, message

    def crosscheck_prob(self):
        try:
            nside = self.ligo_nside
        except AttributeError:
            nside = self.nside

        class MNS:
            def __init__(self, data):
                self.data = pandas.DataFrame(
                    data, columns=["field", "ra", "dec", "datetime"]
                )

        data = []

        for f in self.overlap_fields:
            ra, dec = ztfquery_fields.field_to_coords(float(f))[0]
            t = Time(self.t_min.jd, format="jd").utc
            t.format = "isot"
            t = t.value
            data.append([f, ra, dec, t])

        mns = MNS(data)

        data = mns.data.copy()

        self.logger.info("Unpacking observations")
        field_prob = 0.0

        ps = []

        for index, row in tqdm(data.iterrows()):
            pix = get_quadrant_ipix(nside, row["ra"], row["dec"])

            flat_pix = []

            for sub_list in pix:
                for p in sub_list:
                    flat_pix.append(p)

            flat_pix = list(set(flat_pix))
            ps += flat_pix

        ps = list(set(ps))

        for p in hp.ring2nest(nside, ps):
            field_prob += self.data[self.key][int(p)]

        self.logger.info(
            f"Integrating all fields overlapping the 90% contour gives {100 * field_prob:.2g}%"
        )

    def export_cache_to_fritz(self, group_id=None):
        if group_id is None:
            group_id = self.default_fritz_group

        saved_sources = []

        for source in self.cache.keys():
            response = save_source_to_group(object_id=source, group_id=group_id)

            if response.status_code not in [200]:
                self.logger.warning(f"Bad API call for source {source}: {response}")
            else:
                saved_sources.append(source)

        self.logger.info(f"Saved {len(saved_sources)} sources to fritz group {group_id}")

    def export_cache_to_pickle(self, output_path: str):
        with open(output_path, "wb") as f:
            self.logger.info(f"Saving to {output_path}")
            pickle.dump(self.cache, f)

    def load_cache_from_pickle(self, input_path: str):
        with open(input_path, "rb") as f:
            self.logger.info(f"Loading from {input_path}")
            self.cache = pickle.load(f)