• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

desy-multimessenger / nuztf / 13725459265

07 Mar 2025 04:58PM UTC coverage: 71.901% (-1.7%) from 73.615%
13725459265

push

github

web-flow
Add Kowalski Backend (#490)

471 of 640 new or added lines in 26 files covered. (73.59%)

15 existing lines in 4 files now uncovered.

1978 of 2751 relevant lines covered (71.9%)

0.72 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

9.87
/nuztf/irsa.py
1
#!/usr/bin/env python3
2
# coding: utf-8
3

4
import logging
1✔
5
import os
1✔
6

7
import matplotlib.pyplot as plt
1✔
8
import numpy as np
1✔
9
import pandas as pd
1✔
10
from astropy import constants as const
1✔
11
from astropy import units as u
1✔
12
from astropy.coordinates import SkyCoord
1✔
13
from astropy.table import Table
1✔
14
from astropy.time import Time
1✔
15
from astroquery.exceptions import RemoteServiceError
1✔
16
from astroquery.ipac.ned import Ned
1✔
17
from ztfquery.io import LOCALSOURCE
1✔
18
from ztfquery.lightcurve import LCQuery
1✔
19

20
from nuztf.api import api_name
1✔
21
from nuztf.parse_nu_gcn import find_gcn_no, parse_gcn_circular
1✔
22
from nuztf.style import base_height, base_width, big_fontsize, dpi, plot_dir
1✔
23
from nuztf.utils import cosmo, is_tns_name, is_ztf_name, query_tns_by_name
1✔
24

25
# Module-level logger for this file (named after the module, per logging convention).
logger = logging.getLogger(__name__)
1✔
26

27

28
def format_date(t, atel=True):
    """
    Render an astropy Time as a date string.

    :param t: astropy Time object (mutated: `format` is set to "fits", and
        `out_subfmt` to "date" when `atel` is True).
    :param atel: if True, return an ATel-style date with fractional days
        appended (e.g. "2020-05-31.25"); otherwise return the full FITS value.
    :return: formatted date string.
    """
    t.format = "fits"

    if not atel:
        return t.value

    # Two-decimal fractional part of the MJD, with the leading "0" stripped
    # so it can be appended directly to the date (".XX").
    frac_days = f"{t.mjd - int(t.mjd):.2f}"[1:]
    t.out_subfmt = "date"

    return t.value + frac_days
40

41

42
def load_irsa(ra_deg: float, dec_deg: float, radius_arcsec: float = 0.5, **kwargs):
    """
    Get lightcurve from IPAC via a cone search around the given position.

    :param ra_deg: Right ascension of the search centre (degrees).
    :param dec_deg: Declination of the search centre (degrees).
    :param radius_arcsec: Cone-search radius in arcseconds.
    :param kwargs: Passed through to ztfquery's ``LCQuery.from_position``.
    :return: pandas DataFrame of photometry with rows carrying bad quality
        flags (``catflags > 0``) removed; may be empty if nothing was found.
    """
    logger.debug("Querying IPAC")
    df = LCQuery.from_position(ra_deg, dec_deg, radius_arcsec, **kwargs).data

    logger.debug(f"Found {len(df)} datapoints")

    if len(df) == 0:
        logger.info("No data found.")
        return df

    # catflags > 0 marks datapoints affected by processing/quality issues.
    mask = df.catflags > 0

    flags = list(set(df.catflags))

    logger.info(
        f"Found {len(df)} datapoints, masking {np.sum(mask)} datapoints with bad flags."
    )

    for flag in sorted(flags):
        logger.debug(f"{np.sum(df.catflags == flag)} datapoints with flag {flag}")

    df = df.drop(df[mask].index)
    return df
70

71

72
def plot_irsa_lightcurve(
    source_name: str,
    nu_name: list = None,
    source_coords: list = None,
    source_redshift: float = None,
    plot_mag: bool = False,
    atel: bool = True,
    plot_folder: str = plot_dir,
    extra_folder: str = None,
    check_obs: bool = True,
    check_obs_lookback_weeks: float = 4,
    from_cache: bool = False,
    cache_dir: str = os.path.join(LOCALSOURCE, "cache/"),
    expanded_labels: bool = True,
    ylim: tuple = None,
    radius_arcsec: float = 0.5,
    query_irsa_for_logs: bool = True,
) -> None:
    """
    Plot a ZTF lightcurve for a source using IRSA/IPAC photometry, optionally
    marking neutrino arrival times and adding a second y-axis (absolute
    magnitude or luminosity) when a redshift is known or can be found.

    :param source_name: ZTF, TNS, or otherwise resolvable catalogue name.
    :param nu_name: Neutrino name(s); a vertical line is drawn at each GCN time.
    :param source_coords: [ra, dec] in degrees; resolved from the name if None.
    :param source_redshift: Redshift; queried from TNS/NED if None.
    :param plot_mag: Plot magnitudes if True, otherwise nu-F-nu fluxes.
    :param atel: Use ATel-style fractional-day dates in the log output.
    :param plot_folder: Directory for the output plot.
    :param extra_folder: Optional second directory to also save the plot to.
    :param check_obs: Currently unused in this function.
    :param check_obs_lookback_weeks: Currently unused in this function.
    :param from_cache: Load the lightcurve from the local CSV cache instead of IRSA.
    :param cache_dir: Directory holding cached lightcurve CSVs.
    :param expanded_labels: Add a second x-axis with ISO dates and a title.
    :param ylim: Optional fixed y-axis limits.
    :param radius_arcsec: Cone-search radius for the IRSA query.
    :param query_irsa_for_logs: Currently unused in this function.
    :raises ValueError: if no data could be retrieved from IPAC.
    """
    plot_title = source_name

    # If there are no coordinates, try name resolve to get coordinates!

    if source_coords is None:
        logger.info(f"Trying to resolve {source_name} and query for coordinates.")
        # NOTE(review): this configures the root logger as a side effect — confirm intended.
        logging.basicConfig(format="%(levelname)s:%(message)s", level=logging.INFO)

        # Try ampel to find ZTF coordinates

        if is_ztf_name(name=source_name):
            logger.info("Source name is a ZTF name.")
            res = api_name(source_name, with_history=False)[0]
            source_coords = [res["candidate"]["ra"], res["candidate"]["dec"]]
            logger.info(f"Found ZTF coordinates for source {source_name}")

        # Try TNS

        elif is_tns_name(name=source_name):
            logger.info("Source name is a TNS name.")
            result_dict = query_tns_by_name(name=source_name)

            if not result_dict:
                logger.warning(f"{source_name} is not in TNS.")

            if result_dict:
                logger.info(f"Found {source_name} on TNS.")
                res = result_dict["data"]["reply"]
                ra = res["radeg"]
                dec = res["decdeg"]
                source_coords = [ra, dec]
                if np.logical_and("redshift" in res.keys(), source_redshift is None):
                    source_redshift = res["redshift"]

        # Otherwise try NED

        else:
            logger.info(
                "Source name is neither a ZTF, nor a TNS name. Querying NED instead."
            )
            result_table = Ned.query_object(source_name)

            if len(result_table) == 0:
                logger.warning(
                    f"Failed to resolve name {source_name} in NED. Trying to be clever instead."
                )

                # Strip the name down to its digits and +/- signs (i.e. the
                # J2000-style coordinate part) and retry the NED query.
                querystring = "".join(
                    [
                        x
                        for x in source_name
                        if x in [str(i) for i in range(10)] + ["+", "-"]
                    ]
                )

                # FIX: the original rebuilt this identical string inline
                # instead of reusing `querystring`.
                result_table = Ned.query_object(querystring)

            if len(result_table) == 1:
                source_coords = [result_table["RA"][0], result_table["DEC"][0]]

                if "ZTF" in plot_title:
                    plot_title += f' ({result_table["Object Name"][0]})'

                # "--" is NED's placeholder for a missing redshift.
                if np.logical_and(
                    str(result_table["Redshift"][0]) != "--", source_redshift is None
                ):
                    source_redshift = result_table["Redshift"].data[0]

                logger.info(
                    f"Using Astroquery NED query result for name {source_name} ({source_coords})"
                )

            # Last resort: CDS name resolver.
            if source_coords is None:
                sc = SkyCoord.from_name(source_name)
                logger.info(
                    f"Using Astroquery CDS query result for name {source_name} (RA={sc.ra}, Dec={sc.dec})"
                )
                source_coords = (sc.ra.value, sc.dec.value)

    # Try to find a catalogue source nearby using coordinates

    if ("ZTF" in source_name) and source_coords:
        c = SkyCoord(source_coords[0], source_coords[1], unit=u.deg, frame="icrs")

        r = 0.5 * u.arcsecond

        logger.info("Querying NED")
        result_table = Ned.query_region(c, radius=r)

        if len(result_table) == 1:
            if "ZTF" in plot_title:
                plot_title += f' ({result_table["Object Name"][0]})'

            source_coords = [result_table["RA"][0], result_table["DEC"][0]]

            if np.logical_and(
                str(result_table["Redshift"][0]) != "--", source_redshift is None
            ):
                # NOTE(review): this assigns the whole Redshift column, unlike the
                # scalar element used in the name-resolution branch above — confirm
                # whether `result_table["Redshift"][0]` was intended.
                source_redshift = result_table["Redshift"]

            logger.info(
                f"Found likely match to {source_name}"
                f"(type = '{result_table['Type'][0]}'. "
                f"distance = {result_table['Separation'][0]} arcsec')"
            )
        elif len(result_table) > 1:
            logger.warning(
                f"Found multiple possible cross-matches: {result_table['Object Name']}"
            )
        else:
            logger.info("No NED crossmatch found.")

    # Query IRSA, or load from cache

    # FIX: replaces the try/except OSError idiom around makedirs.
    os.makedirs(cache_dir, exist_ok=True)

    cache_path = os.path.join(cache_dir, f'{source_name.replace(" ", "")}.csv')

    if from_cache:
        logger.debug(f"Loading from {cache_path}")
        df = pd.read_csv(cache_path)

    else:
        logger.info("Querying IPAC for a lightcurve")
        df = load_irsa(source_coords[0], source_coords[1], radius_arcsec=radius_arcsec)

        logger.debug(f"Saving to {cache_path}")
        if df is not None:
            df.to_csv(cache_path)

    if df is None:
        raise ValueError("No data retrieved from IPAC")
    data = Table.from_pandas(df)

    logger.info(f"There are a total of {len(data)} detections for {source_name}")

    # Start Figure

    plt.figure(figsize=(base_width * 1.1, base_height), dpi=dpi)

    if expanded_labels:
        ax2 = plt.subplot(111)

        ax = ax2.twiny()

    else:
        ax = plt.subplot(111)

    # If you have a redshift, you can add a second y axis!

    if source_redshift is None:
        logger.info("Querying NED to check for a redshift")
        try:
            result_table = Ned.query_object(source_name)
            if len(result_table["Redshift"]) == 1:
                if str(result_table["Redshift"][0]) == "--":
                    raise RemoteServiceError

                source_redshift = result_table["Redshift"][0].data[0]
                logger.info(f"Found a redshift of {source_redshift}")
            elif len(result_table["Redshift"]) > 1:
                logger.warning(f"Found multiple redshifts: {result_table}")
            else:
                raise RemoteServiceError
        except (RemoteServiceError, IndexError):
            logger.info("No redshift found")

    elif np.isnan(source_redshift):
        source_redshift = None

    if source_redshift is not None:
        ax1b = ax.twinx()

        redshift = 1.0 + source_redshift

        if plot_mag:
            # Distance modulus, for converting apparent to absolute magnitude.
            dist_mod = 5 * (
                np.log10(cosmo.luminosity_distance(z=source_redshift).to(u.pc).value)
                - 1.0
            )
        else:
            # Flux -> luminosity conversion: 4*pi*d_L^2, with a 1/(1+z) factor.
            conversion_factor = (
                4
                * np.pi
                * cosmo.luminosity_distance(z=source_redshift).to(u.cm) ** 2.0
                / (redshift)
            )

    cmap = {"zg": "g", "zr": "r", "zi": "orange"}

    # Central wavelengths (nm) of the ZTF g/r/i filters.
    wl = {
        "zg": 472.27,
        "zr": 633.96,
        "zi": 788.61,
    }

    markersize = 2.0

    latest_index = list(data["mjd"]).index(max(data["mjd"]))
    latest = data[latest_index]

    dt = format_date(Time(latest["mjd"], format="mjd"), atel=atel)

    logger.info(
        f"Most recent detection on {dt} UT at a magnitude of "
        f"{latest['filtercode'][1]}={latest['mag']:.2f}+/-{latest['magerr']:.2f}"
    )

    # Plot each band (g/r/i)

    for fc in ["zg", "zr", "zi"]:
        mask = data["filtercode"] == fc

        mags = data["mag"][mask] * u.ABmag

        # Upper-bound magnitudes (mag + magerr), used to derive flux errors below.
        magerrs = (data["magerr"][mask] + data["mag"][mask]) * u.ABmag

        if plot_mag:
            ax.errorbar(
                data["mjd"][mask],
                mags.value,
                yerr=data["magerr"][mask],
                marker="o",
                linestyle=" ",
                markersize=markersize,
                c=cmap[fc],
                label=f"{fc[-1]} ({wl[fc]:.0f} nm)",
            )

            if source_redshift is not None:
                ax1b.errorbar(
                    data["mjd"][mask],
                    mags.value - dist_mod,
                    yerr=data["magerr"][mask],
                    marker="o",
                    linestyle=" ",
                    markersize=markersize,
                    c=cmap[fc],
                    label=f"{fc[-1]} ({wl[fc]:.0f} nm)",
                )

        else:
            flux_j = mags.to(u.Jansky)

            # Frequency corresponding to the filter's central wavelength.
            f = (const.c / (wl[fc] * u.nm)).to("Hz")

            flux = (flux_j * f).to("erg cm-2 s-1")

            jerrs = magerrs.to(u.Jansky)
            ferrs = np.abs((jerrs * f).to("erg cm-2 s-1").value - flux.value)

            ax.errorbar(
                data["mjd"][mask],
                flux.to("erg cm-2 s-1").value,
                yerr=ferrs,
                marker="o",
                linestyle=" ",
                markersize=markersize,
                c=cmap[fc],
                label=f"{fc[-1]} ({wl[fc]:.0f} nm)",
            )

            if source_redshift is not None:
                l = flux * conversion_factor

                ax1b.errorbar(
                    data["mjd"][mask],
                    l.to("erg s-1"),
                    marker="o",
                    linestyle=" ",
                    markersize=markersize,
                    c=cmap[fc],
                    label=f"{fc[-1]} ({wl[fc]:.0f} nm)",
                )

    # You can force the y limits if you want

    if ylim is not None:
        ax.set_ylim(ylim)

    if plot_mag:
        ax.set_ylabel(r"Apparent magnitude [AB]", fontsize=big_fontsize)

        ax.invert_yaxis()

        if source_redshift is not None:
            ax1b.set_ylabel(r"Absolute magnitude [AB]", fontsize=big_fontsize)

            y_min, y_max = ax.get_ylim()

            ax1b.invert_yaxis()

            ax1b.set_ylim(y_min - dist_mod, y_max - dist_mod)

    else:
        ax.set_ylabel(r"$\nu$F$_{\nu}$ [erg cm$^{-2}$ s$^{-1}$]", fontsize=big_fontsize)

        ax.set_yscale("log")

        if source_redshift is not None:
            ax1b.set_ylabel(r"$\nu$L$_{\nu}$ [erg s$^{-1}$]", fontsize=big_fontsize)
            ax1b.set_yscale("log")

            y_min, y_max = ax.get_ylim()
            # FIX: was a bare print() left over from debugging.
            logger.debug(f"Conversion factor: {conversion_factor}")

            ax1b.set_ylim(
                y_min * conversion_factor.value, y_max * conversion_factor.value
            )

    ax.set_xlabel("Date [MJD]", fontsize=big_fontsize)

    # Add neutrino

    if nu_name is None:
        nu_name = []

    if not isinstance(nu_name, list):
        nu_name = [nu_name]

    for j, nu in enumerate(nu_name):
        gcn_no = find_gcn_no(nu)
        gcn_info = parse_gcn_circular(gcn_no)

        ax.axvline(gcn_info["time"].mjd, linestyle=":", label=nu, color=f"C{j}")

    if expanded_labels:
        # Set up ISO dates

        lmjd, umjd = ax.get_xlim()

        lt = Time(lmjd, format="mjd")
        ut = Time(umjd, format="mjd")

        nt = Time.now()
        nt.format = "fits"

        mjds = []
        labs = []

        # Tick at 1 Jan and 1 Jul of each year from 2016 to now, keeping only
        # those that fall inside the plotted x-range.
        for year in range(2016, int(nt.value[:4]) + 1):
            for month in [1, 7]:
                t = Time(f"{year}-{month}-01T00:00:00.01", format="isot", scale="utc")
                t.format = "fits"
                t.out_subfmt = "date"

                if np.logical_and(t > lt, t < ut):
                    mjds.append(t.mjd)
                    labs.append(t.value)

        ax2.set_xticks(mjds)
        ax2.set_xticklabels(labels=labs, rotation=80)

        ax2.set_xlim(lmjd, umjd)

        ax.set_title(f'ZTF Lightcurve of {plot_title.replace("J", " J")}', y=1.4)

        ax2.tick_params(axis="both", which="major", labelsize=big_fontsize)

    ax.tick_params(axis="both", which="both", labelsize=big_fontsize, left=True)

    if source_redshift is not None:
        ax1b.tick_params(axis="both", which="major", labelsize=big_fontsize)

    ax.legend(
        loc="upper center",
        bbox_to_anchor=(0.5, 1.22 + 0.2 * float(expanded_labels)),
        ncol=3 + len(nu_name),
        fancybox=True,
        fontsize=big_fontsize,
    )

    filename = f"{source_name.replace(' ', '')}_lightcurve{['_flux', ''][plot_mag]}.png"

    # BUG FIX: `filename` was computed but never used; the plot was saved to a
    # literal "(unknown)" path instead.
    output_path = os.path.join(plot_folder, filename)

    logger.info(f"Saving to {output_path}")

    plt.savefig(output_path, bbox_inches="tight", pad_inches=0.05)

    if extra_folder is not None:
        extra_path = os.path.join(extra_folder, filename)
        logger.info(f"Saving to {extra_path}")
        plt.savefig(extra_path, bbox_inches="tight", pad_inches=0.05)
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2025 Coveralls, Inc