Skip to content

Commit

Permalink
Merge branch 'devel'
Browse files Browse the repository at this point in the history
  • Loading branch information
rhandberg committed Jun 10, 2021
2 parents 647d0aa + c75a368 commit ccc62f1
Show file tree
Hide file tree
Showing 12 changed files with 290 additions and 82 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ jobs:

- name: Install dependencies
run: |
python -m pip install --upgrade pip
python -m pip install --upgrade pip wheel
grep "numpy" requirements.txt | xargs -I {} pip install "{}"
pip install -r requirements.txt
pip install codecov pytest-cov
Expand Down Expand Up @@ -69,7 +69,7 @@ jobs:

- name: Install dependencies
run: |
python -m pip install --upgrade pip
python -m pip install --upgrade pip wheel
grep "numpy" requirements.txt | xargs -I {} pip install "{}"
pip install -r requirements.txt
Expand Down
2 changes: 1 addition & 1 deletion flows/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
# flake8: noqa

from .photometry import photometry
from .download_catalog import download_catalog
from .catalogs import download_catalog
from .visibility import visibility
from .config import load_config

Expand Down
6 changes: 5 additions & 1 deletion flows/api/datafiles.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ def get_datafile(fileid):
return jsn

#--------------------------------------------------------------------------------------------------
def get_datafiles(targetid=None, filt=None):
def get_datafiles(targetid=None, filt=None, minversion=None):
"""
Get list of data file IDs to be processed.
Expand All @@ -42,6 +42,8 @@ def get_datafiles(targetid=None, filt=None):
filt (str, optional): Filter the returned list:
- ``missing``: Return only data files that have not yet been processed.
- ``'all'``: Return all data files.
minversion (str, optional): Special filter matching files not processed at least with
the specified version (defined internally in API for now).
Returns:
list: List of data files that can be processed.
Expand All @@ -64,6 +66,8 @@ def get_datafiles(targetid=None, filt=None):
params = {}
if targetid is not None:
params['targetid'] = targetid
if minversion is not None:
params['version'] = minversion
params['filter'] = filt

r = requests.get('https://flows.phys.au.dk/api/datafiles.php',
Expand Down
198 changes: 141 additions & 57 deletions flows/download_catalog.py → flows/catalogs.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,18 +11,21 @@
import shlex
import requests
import numpy as np
from astropy.time import Time
from astropy.coordinates import SkyCoord, Angle
from astropy import units as u
from astropy.table import Table
from astroquery.sdss import SDSS
from .config import load_config
from .aadc_db import AADC_DB
from .ztf import query_ztf_id

#--------------------------------------------------------------------------------------------------
class CasjobsException(Exception):
class CasjobsError(RuntimeError):
    """Raised when a CasJobs query fails or returns an error message."""
    pass

#--------------------------------------------------------------------------------------------------
class CasjobsMemoryError(Exception):
class CasjobsMemoryError(RuntimeError):
    """Raised when a CasJobs query exceeds the server-side memory limit."""
    pass

#--------------------------------------------------------------------------------------------------
Expand Down Expand Up @@ -54,7 +57,7 @@ def configure_casjobs(overwrite=False):
wsid = config.get('casjobs', 'wsid', fallback=None)
passwd = config.get('casjobs', 'password', fallback=None)
if wsid is None or passwd is None:
raise CasjobsException("CasJobs WSID and PASSWORD not in config.ini")
raise CasjobsError("CasJobs WSID and PASSWORD not in config.ini")

try:
with open(casjobs_config, 'w') as fid:
Expand Down Expand Up @@ -189,7 +192,8 @@ def _query_casjobs_refcat2(coo_centre, radius=24*u.arcmin):
results = []
for line in output:
line = line.strip()
if line == '': continue
if line == '':
continue
if 'ERROR' in line:
error_thrown = True
break
Expand Down Expand Up @@ -225,10 +229,10 @@ def _query_casjobs_refcat2(coo_centre, radius=24*u.arcmin):
if 'query results exceed memory limit' in error_msg.lower():
raise CasjobsMemoryError("Query results exceed memory limit")
else:
raise CasjobsException("ERROR detected in CasJobs: " + error_msg)
raise CasjobsError("ERROR detected in CasJobs: " + error_msg)

if not results:
raise CasjobsException("Could not find anything on CasJobs")
raise CasjobsError("Could not find anything on CasJobs")

logger.debug("Found %d results", len(results))
return results
Expand All @@ -253,12 +257,19 @@ def query_apass(coo_centre, radius=24*u.arcmin):
if isinstance(radius, (float, int)):
radius *= u.deg

r = requests.post('https://www.aavso.org/cgi-bin/apass_dr10_download.pl',
data={'ra': coo_centre.ra.deg, 'dec': coo_centre.dec.deg, 'radius': Angle(radius).deg, 'outtype': '1'})
data = {
'ra': coo_centre.icrs.ra.deg,
'dec': coo_centre.icrs.dec.deg,
'radius': Angle(radius).deg,
'outtype': '1'
}

res = requests.post('https://www.aavso.org/cgi-bin/apass_dr10_download.pl', data=data)
res.raise_for_status()

results = []

lines = r.text.split("\n")
lines = res.text.split("\n")
#header = lines[0]

for line in lines[1:]:
Expand All @@ -280,10 +291,47 @@ def query_apass(coo_centre, radius=24*u.arcmin):

return results

#--------------------------------------------------------------------------------------------------
def query_sdss(coo_centre, radius=24*u.arcmin, dr=16, clean=True):
    """
    Query SDSS catalog using a cone-search around the position using astroquery.

    Parameters:
        coo_centre (:class:`astropy.coordinates.SkyCoord`): Coordinates of centre of search cone.
        radius (Angle, optional): Search radius. A bare ``float``/``int`` is
            interpreted as degrees. Default is 24 arcmin.
        dr (int, optional): SDSS Data Release to query. Default=16.
        clean (bool, optional): Clean results for stars only and no other problems.

    Returns:
        :class:`astropy.table.Table`: Table with SDSS information,
        or ``None`` if no objects were found within the search cone.

    .. codeauthor:: Emir Karamehmetoglu <emir.k@phys.au.dk>
    .. codeauthor:: Rasmus Handberg <rasmush@phys.au.dk>
    """

    # Allow callers to pass a plain number, interpreted as degrees:
    if isinstance(radius, (float, int)):
        radius *= u.deg

    AT_sdss = SDSS.query_region(coo_centre,
        photoobj_fields=['type', 'clean', 'ra', 'dec', 'psfMag_u'],
        data_release=dr,
        timeout=600,
        radius=radius)

    # SDSS.query_region returns None when nothing matched; return it
    # explicitly instead of crashing on the boolean indexing below:
    if AT_sdss is None:
        return None

    if clean:
        # Clean SDSS following https://www.sdss.org/dr12/algorithms/photo_flags_recommend/
        # 6 == star, clean means remove interp, edge, suspicious defects, deblending problems, duplicates.
        AT_sdss = AT_sdss[(AT_sdss['type'] == 6) & (AT_sdss['clean'] == 1)]

    return AT_sdss

#--------------------------------------------------------------------------------------------------
def query_all(coo_centre, radius=24*u.arcmin, dist_cutoff=2*u.arcsec):
"""
Query all catalogs, and return merged catalog.
Query all catalogs (REFCAT2, APASS and SDSS) and return merged catalog.
Merging of catalogs are done using sky coordinates:
https://docs.astropy.org/en/stable/coordinates/matchsep.html#matching-catalogs
Parameters:
coo_centre (:class:`astropy.coordinates.SkyCoord`): Coordinates of centre of search cone.
Expand All @@ -294,94 +342,130 @@ def query_all(coo_centre, radius=24*u.arcmin, dist_cutoff=2*u.arcsec):
list: List of dicts with catalog stars.
.. codeauthor:: Rasmus Handberg <rasmush@phys.au.dk>
.. codeauthor:: Emir Karamehmetoglu <emir.k@phys.au.dk>
"""

# Query the REFCAT2 catalog using CasJobs around the target position:
results = query_casjobs_refcat2(coo_centre, radius=radius)
AT_results = Table(results)

# Query APASS around the target position:
results_apass = query_apass(coo_centre, radius=radius)
AT_apass = Table(results_apass)

# Match the two catalogs using coordinates:
# https://docs.astropy.org/en/stable/coordinates/matchsep.html#matching-catalogs
ra = np.array([r['ra'] for r in results])
decl = np.array([r['decl'] for r in results])
refcat = SkyCoord(ra=ra, dec=decl, unit=u.deg, frame='icrs')
#ra = np.array([r['ra'] for r in results])
#decl = np.array([r['decl'] for r in results])
refcat = SkyCoord(ra=AT_results['ra'], dec=AT_results['decl'], unit=u.deg, frame='icrs')

ra_apass = np.array([r['ra'] for r in results_apass])
decl_apass = np.array([r['decl'] for r in results_apass])
apass = SkyCoord(ra=ra_apass, dec=decl_apass, unit=u.deg, frame='icrs')
#ra_apass = np.array([r['ra'] for r in results_apass])
#decl_apass = np.array([r['decl'] for r in results_apass])
apass = SkyCoord(ra=AT_apass['ra'], dec=AT_apass['decl'], unit=u.deg, frame='icrs')

# Match the two catalogs:
idx, d2d, _ = apass.match_to_catalog_sky(refcat)
sep_constraint = d2d <= dist_cutoff # Reject any match further away than the cutoff:
idx_apass = np.arange(len(idx)) # since idx maps apass to refcat

# Update results table with APASS bands of interest
AT_results.add_columns([None,None,None],names=['B_mag','V_mag','u_mag']) # Results table does not have uBV
AT_results['B_mag'][idx[sep_constraint]] = AT_apass[idx_apass[sep_constraint]]['B_mag']
AT_results['V_mag'][idx[sep_constraint]] = AT_apass[idx_apass[sep_constraint]]['V_mag']
AT_results['u_mag'][idx[sep_constraint]] = AT_apass[idx_apass[sep_constraint]]['u_mag']

# Create SDSS cat
AT_sdss = query_sdss(coo_centre, radius=radius)
sdss = SkyCoord(ra=AT_sdss['ra'], dec=AT_sdss['dec'], unit=u.deg, frame='icrs')

# Match to dist_cutoff sky distance (angular) apart
idx, d2d, _ = sdss.match_to_catalog_sky(refcat)
sep_constraint = d2d <= dist_cutoff
idx_sdss = np.arange(len(idx)) # since idx maps sdss to refcat
# TODO: Maybe don't (potentially) overwrite APASS uband with SDSS uband. Decide which is better.
AT_results['u_mag'][idx[sep_constraint]] = AT_sdss[idx_sdss[sep_constraint]]['psfMag_u']

# # Go through the matches and make sure they are valid:
# for k, i in enumerate(idx):
# # If APASS doesn't contain any new information anyway, skip it:
# if results_apass[k]['B_mag'] is None and results_apass[k]['V_mag'] is None \
# and results_apass[k]['u_mag'] is None:
# continue
#
# # Reject any match further away than the cutoff:
# if d2d[k] > dist_cutoff:
# continue
#
# # TODO: Use the overlapping magnitudes to make better match:
# #photdist = 0
# #for photfilt in ('g_mag', 'r_mag', 'i_mag', 'z_mag'):
# # if results_apass[k][photfilt] and results[i][photfilt]:
# # photdist += (results[i][photfilt] - results_apass[k][photfilt])**2
# #print( np.sqrt(photdist) )
#
# # Update the results "table" with the APASS filters:
# results[i].update({
# 'V_mag': results_apass[k]['V_mag'],
# 'B_mag': results_apass[k]['B_mag'],
# 'u_mag': results_apass[k]['u_mag']
# })
#
# # Fill in empty fields where nothing was matched:
# for k in range(len(results)):
# if 'V_mag' not in results[k]:
# results[k].update({
# 'B_mag': None,
# 'V_mag': None,
# 'u_mag': None
# })

# TODO: Adjust receiving functions so we can just pass the astropy table instead.
return [dict(zip(AT_results.colnames, row)) for row in AT_results]

# Go through the matches and make sure they are valid:
for k, i in enumerate(idx):
# If APASS doesn't contain any new information anyway, skip it:
if results_apass[k]['B_mag'] is None and results_apass[k]['V_mag'] is None \
and results_apass[k]['u_mag'] is None:
continue

# Reject any match further away than the cutoff:
if d2d[k] > dist_cutoff:
continue

# TODO: Use the overlapping magnitudes to make better match:
#photdist = 0
#for photfilt in ('g_mag', 'r_mag', 'i_mag', 'z_mag'):
# if results_apass[k][photfilt] and results[i][photfilt]:
# photdist += (results[i][photfilt] - results_apass[k][photfilt])**2
#print( np.sqrt(photdist) )

# Update the results "table" with the APASS filters:
results[i].update({
'V_mag': results_apass[k]['V_mag'],
'B_mag': results_apass[k]['B_mag'],
'u_mag': results_apass[k]['u_mag']
})

# Fill in empty fields where nothing was matched:
for k in range(len(results)):
if 'V_mag' not in results[k]:
results[k].update({
'B_mag': None,
'V_mag': None,
'u_mag': None
})
#--------------------------------------------------------------------------------------------------
def download_catalog(target=None, radius=24*u.arcmin, radius_ztf=3*u.arcsec, dist_cutoff=2*u.arcsec):
"""
Download reference star catalogs and save to Flows database.
return results
Parameters:
target (str or int): Target identifier to download catalog for.
radius (Angle, optional): Radius around target to download catalogs.
radius_ztf (Angle, optional): Radius around target to search for ZTF identifier.
dist_cutoff (Angle, optional): Distance cutoff used for matching catalog positions.
#--------------------------------------------------------------------------------------------------
def download_catalog(target=None, radius=24*u.arcmin, dist_cutoff=2*u.arcsec):
.. codeauthor:: Rasmus Handberg <rasmush@phys.au.dk>
"""

logger = logging.getLogger(__name__)

with AADC_DB() as db:

# Get the information about the target from the database:
if target is not None and isinstance(target, int):
db.cursor.execute("SELECT targetid,target_name,ra,decl FROM flows.targets WHERE targetid=%s;", [target])
db.cursor.execute("SELECT targetid,target_name,ra,decl,discovery_date FROM flows.targets WHERE targetid=%s;", [target])
elif target is not None:
db.cursor.execute("SELECT targetid,target_name,ra,decl FROM flows.targets WHERE target_name=%s;", [target])
db.cursor.execute("SELECT targetid,target_name,ra,decl,discovery_date FROM flows.targets WHERE target_name=%s;", [target])
else:
db.cursor.execute("SELECT targetid,target_name,ra,decl FROM flows.targets WHERE catalog_downloaded=FALSE;")
db.cursor.execute("SELECT targetid,target_name,ra,decl,discovery_date FROM flows.targets WHERE catalog_downloaded=FALSE;")

for row in db.cursor.fetchall():
# The unique identifier of the target:
targetid = int(row['targetid'])
target_name = row['target_name']
dd = row['discovery_date']
if dd is not None:
dd = Time(dd, format='iso', scale='utc')

# Coordinate of the target, which is the centre of the search cone:
coo_centre = SkyCoord(ra=row['ra'], dec=row['decl'], unit=u.deg, frame='icrs')

results = query_all(coo_centre, radius=radius, dist_cutoff=dist_cutoff)

# Query for a ZTF identifier for this target:
ztf_id = query_ztf_id(coo_centre, radius=radius)
ztf_id = query_ztf_id(coo_centre, radius=radius_ztf, discovery_date=dd)

# Insert the catalog into the local database:
try:
#db.cursor.execute("TRUNCATE flows.refcat2;")
db.cursor.executemany("""INSERT INTO flows.refcat2 (
starid,
ra,
Expand Down
6 changes: 5 additions & 1 deletion flows/tns.py
Original file line number Diff line number Diff line change
Expand Up @@ -178,6 +178,9 @@ def tns_getnames(months=None, date_begin=None, date_end=None, zmin=None, zmax=No
if months is not None and date_end is not None and date_end < date_now - datetime.timedelta(days=months*30):
logger.warning('Months limit restricts days_begin, consider increasing limit_months.')

# API key for Bot
tnsconf = _load_tns_config()

# Parameters for query:
params = {
'discovered_period_value': months, # Reported Within The Last
Expand Down Expand Up @@ -256,7 +259,8 @@ def tns_getnames(months=None, date_begin=None, date_end=None, zmin=None, zmax=No
}

# Query TNS for names:
con = requests.get(url_tns_search, params=params)
headers = {'user-agent': tnsconf['user-agent']}
con = requests.get(url_tns_search, params=params, headers=headers)
con.raise_for_status()

# Parse the CSV table:
Expand Down
Loading

0 comments on commit ccc62f1

Please sign in to comment.