Skip to content

Commit

Permalink
Changes TNS query (they changed their API)
Browse files — browse the repository at this point in the history
  • Loading branch information
simeonreusch committed Sep 23, 2020
1 parent 41ed0a0 commit a1c1542
Show file tree
Hide file tree
Showing 2 changed files with 46 additions and 33 deletions.
41 changes: 28 additions & 13 deletions ampel_magic.py
Original file line number Diff line number Diff line change
Expand Up @@ -283,13 +283,13 @@ def plot_ztf_observations(self, **kwargs):
self.get_multi_night_summary().show_gri_fields(**kwargs)

def get_multi_night_summary(self, max_days=None):
now = datetime.datetime.now()

if max_days is not None:
date_1 = datetime.datetime.strptime(self.mns_time, "%Y%m%d")
end_date = date_1 + datetime.timedelta(days=max_days)
end_date = end_date.strftime("%Y-%m-%d")
else:
now = datetime.datetime.now()
end_date = now.strftime("%Y-%m-%d")

if self.mns is None:
Expand Down Expand Up @@ -482,13 +482,16 @@ def query_tns(self, ra, dec, searchradius_arcsec=3):
raise e
try:
query_result = extcat_query.findwithin_2Dsphere(ra=ra, dec=dec, rs_arcsec=searchradius_arcsec, find_one = False)
name = "{} {}".format(query_result[0]['name_prefix'],query_result[0]['name'])
discovery_date = "{}".format(query_result[0]['discoverydate'])

name = f"{query_result[0]['name_prefix']} {query_result[0]['objname']}"
discovery_date = f"{query_result[0]['discoverydate']}"

try:
discovery_group = "{}".format(query_result[0]['source_group']['group_name'])
discovery_group = f"{query_result[0]['source_group']['group_name']}"
except KeyError:
discovery_group = None
return name, discovery_date, discovery_group

except TypeError:
return None

Expand Down Expand Up @@ -907,19 +910,23 @@ def __init__(self, data):
obs_times = np.array([Time(data["datetime"].iat[i], format="isot", scale="utc")
for i in range(len(data))])


if first_det_window_days is not None:
first_det_mask = [x < Time(self.t_min.jd + first_det_window_days, format="jd").utc for x in obs_times]
data = data[first_det_mask]
obs_times = obs_times[first_det_mask]


pix_obs_times = dict()

print("Unpacking observations")

pix_map = dict()

for i, obs_time in enumerate(tqdm(obs_times)):
pix = get_quadrant_ipix(nside, data["ra"].iat[i], data["dec"].iat[i])
radec = [f"{data['ra'].iat[i]} {data['dec'].iat[i]}"]
coords = SkyCoord(radec, unit=(u.hourangle, u.deg))
# pix = get_quadrant_ipix(nside, data["ra"].iat[i], data["dec"].iat[i])
pix = get_quadrant_ipix(nside, coords.ra.deg[0], coords.dec.deg[0])

field = data["field"].iat[i]

flat_pix = []
Expand All @@ -942,6 +949,7 @@ def __init__(self, data):
else:
pix_map[p] += [field]


npix = hp.nside2npix(nside)
theta, phi = hp.pix2ang(nside, np.arange(npix), nest=False)
radecs = SkyCoord(ra=phi * u.rad, dec=(0.5 * np.pi - theta) * u.rad)
Expand All @@ -961,18 +969,18 @@ def __init__(self, data):
single_no_plane_pixels = []

overlapping_fields = []

for i, p in enumerate(tqdm(hp.nest2ring(nside, self.pixel_nos))):

if p in pix_obs_times.keys():

if p in idx:
print("1")
plane_pixels.append(p)
plane_probs.append(self.map_probs[i])

obs = pix_obs_times[p]

# check which healpix are observed twice
if max(obs) - min(obs) > min_sep:
# is it in galactic plane or not?
if p not in idx:
double_no_plane_prob.append(self.map_probs[i])
double_no_plane_pixels.append(p)
Expand All @@ -982,9 +990,11 @@ def __init__(self, data):

else:
if p not in idx:
print("4")
single_no_plane_pixels.append(p)
single_no_plane_prob.append(self.map_probs[i])
else:
print("5")
single_probs.append(self.map_probs[i])
single_pixels.append(p)

Expand All @@ -994,10 +1004,15 @@ def __init__(self, data):
else:
veto_pixels.append(p)

# print(f"double no plane prob = {double_no_plane_prob}")
# print(f"probs = {probs}")
# print(f"single no plane prob = {single_no_plane_prob}")
# print(f"single probs = {single_probs}")

overlapping_fields = sorted(list(set(overlapping_fields)))
self.overlap_fields = list(set(overlapping_fields))

self.overlap_prob = np.sum(probs + single_probs) * 100.
self.overlap_prob = np.sum(probs + single_probs + double_no_plane_prob + single_no_plane_prob) * 100.

size = hp.max_pixrad(nside) ** 2 * 50.

Expand Down Expand Up @@ -1062,8 +1077,8 @@ def __init__(self, data):
self.last_obs.utc.format = "isot"

except ValueError:
raise Exception("No observations of this field were found at any time after {0:.2f} JD. "
"Coverage overlap is 0%!".format(self.t_min.jd))
raise Exception(f"No observations of this field were found at any time after {self.t_min.jd:.2f} JD ({self.t_min}). "
"Coverage overlap is 0%!")

print("Observations started at {0}".format(self.first_obs.jd))

Expand Down
38 changes: 18 additions & 20 deletions neutrino_scanner.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,43 +71,41 @@ def __init__(self, nu_name=None, manual_args=None, gcn_no=None, logger=None, con
self.gcn_no = gcn_no
self.dist = None

print("Neutrino time: {0}".format(nu_time))
print(f"Neutrino time: {nu_time}")

self.ra_max = float(max(ra[1:]) + ra[0])
self.ra_min = float(min(ra[1:]) + ra[0])
self.dec_max = float(max(dec[1:]) + dec[0])
self.dec_min = float(min(dec[1:]) + dec[0])

print("Coordinates: RA = {0} ({1} - {2})".format(ra[0], self.ra_min, self.ra_max))
print("Coordinates: Dec = {0} ({1} - {2})".format(dec[0], self.dec_min, self.dec_max))
print(f"Coordinates: RA = {ra[0]} ({self.ra_min} - {self.ra_max})")
print(f"Coordinates: DEC = {dec[0]} ({self.dec_min} - {self.dec_max})")

self.output_path = "{0}/{1}.pdf".format(nu_candidate_output_dir, nu_name)
self.output_path = f"{nu_candidate_output_dir}/{nu_name}.pdf"
AmpelWizard.__init__(self, t_min=nu_time, run_config=nu_run_config, logger=logger, cone_nside=cone_nside)
self.default_t_max = Time.now()
self.prob_threshold = 0.9
self.area = (self.ra_max - self.ra_min) * (self.dec_max - self.dec_min) * abs(np.cos(np.radians(dec[0])))
print("Projected Area: {0}".format(self.area))
print(f"Projected Area: {self.area}")
self.map_coords, self.pixel_nos, self.nside, self.map_probs, self.data, self.key = self.unpack_map()

@staticmethod
def gcn_url(gcn_number):
return "https://gcn.gsfc.nasa.gov/gcn3/{0}.gcn3".format(gcn_number)
return f"https://gcn.gsfc.nasa.gov/gcn3/{gcn_number}.gcn3"

def get_name(self):
    """Return the neutrino event name stored on this scanner (``self.nu_name``)."""
    return self.nu_name

def get_full_name(self):
return "neutrino event {0} ({1} et. al, GCN {2})".format(self.get_name(), self.author, self.gcn_no)
return f"neutrino event {self.get_name()} ({self.author} et. al, GCN {self.gcn_no})"

def get_overlap_line(self):
return "We covered {0:.1f} sq deg, corresponding to {1:.1f}% of the reported localisation region. " \
"This estimate accounts for chip gaps. ".format(
self.area, self.overlap_prob)
return f"We covered {self.area:.1f} sq deg, corresponding to {self.overlap_prob:.1f}% of the reported localization region. " \
"This estimate accounts for chip gaps. "

def candidate_text(self, name, first_detection, lul_lim, lul_jd):
text = "{0} was first detected on {1}. ".format(
name, first_detection
)
text = f"{name} was first detected on {first_detection}. "

return text

# @staticmethod
Expand Down Expand Up @@ -138,7 +136,7 @@ def strip_numbers(line):
def parse_gcn(self, gcn_number):
url = self.gcn_url(gcn_number)
page = requests.get(url)
print("Found GCN: {0}".format(url))
print(f"Found GCN: {url}")
name = author = ra = dec = time = None
for line in page.text.splitlines():
line = "".join([x for x in line if x not in ["Â"]])
Expand All @@ -157,7 +155,7 @@ def parse_gcn(self, gcn_number):
x.isdigit(), x in [":", "."]
)])
raw_date = name.split("-")[1][:6]
ut_time = "20{0}-{1}-{2}T{3}".format(raw_date[0:2], raw_date[2:4], raw_date[4:6], raw_time)
ut_time = f"20{raw_date[0:2]}-{raw_date[2:4]}-{raw_date[4:6]}T{raw_time}"
time = Time(ut_time, format='isot', scale='utc')

try:
Expand All @@ -172,7 +170,7 @@ def parse_gcn(self, gcn_number):

print(name, author, ra, dec, time)

raise ParsingError("Error parsing GCN {0}".format(url))
raise ParsingError(f"Error parsing GCN {0}".format(url))


def parse_gcn_archive(self):
Expand Down Expand Up @@ -212,9 +210,9 @@ def parse_gcn_for_no(base_nu_name, url="https://gcn.gsfc.nasa.gov/gcn3_archive.h
if gcn_no is None:
gcn_no = "".join([x for x in res[2] if x.isdigit()])
name = res[3].split(" - ")[0]
print("Found match to {0}: {1}".format(base_nu_name, name))
print(f"Found match to {base_nu_name}: {name}")
else:
raise Exception("Multiple matches found to {0}".format(base_nu_name))
raise Exception(f"Multiple matches found to {base_nu_name}")

elif np.logical_and("gcn3_arch_old" in line, latest_archive_no is None):
url = line.split('"')[1]
Expand All @@ -241,13 +239,13 @@ def find_gcn_no(self, base_nu_name):
if name is None:
raise ParsingError("No GCN match found for {0}".format(base_nu_name))

print("Match is {0} (GCN #{1})".format(name, gcn_no))
print(f"Match is {name} (GCN #{gcn_no})")

return gcn_no

def get_latest_gcn(self):
latest = self.parse_gcn_archive()[0]
print("Latest GCN is {0} (GCN #{1})".format(latest[0], latest[1]))
print(f"Latest GCN is {latest[0]} (GCN #{latest[1]})")
return latest[1]


Expand Down

0 comments on commit a1c1542

Please sign in to comment.