diff --git a/.gitignore b/.gitignore
index 16e998e..679fafc 100644
--- a/.gitignore
+++ b/.gitignore
@@ -147,3 +147,12 @@ tests/output/
 typings/
 *.config
+
+# Downloaded at run-time in flows_mwmap.py:
+flows_mwmap_milkyway.fits
+
+# input for pytest
+tests/input/2020lao/59000.96584_h_e_20200531_33_1_1_1_2020lao_LT_gp.fits.gz
+tests/input/2020lao/subtracted/59000.96584_h_e_20200531_33_1_1_1_2020lao_LT_gpdiff.fits.gz
+tests/input/2021wyw/ADP.2021-10-15T11_40_06.553.fits.gz
+tests/input/2020aatc/SN2020aatc_K_20201213_495s.fits.gz
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..20e7a15
--- /dev/null
+++ b/README.md
@@ -0,0 +1,148 @@
+# Flows Pipeline
+[![DOI](https://zenodo.org/badge/241705955.svg)](https://zenodo.org/badge/latestdoi/241705955)
+[![Tests](https://github.com/SNflows/flows/actions/workflows/tests.yml/badge.svg?branch=devel)](https://github.com/SNflows/flows/actions/workflows/tests.yml)
+[![Code Coverage](https://codecov.io/github/SNflows/flows/branch/devel/graph/badge.svg?token=H8CQGPG0U6)](https://codecov.io/github/SNflows/flows)
+[![Hits-of-Code](https://hitsofcode.com/github/SNflows/flows?branch=devel)](https://hitsofcode.com/view/github/SNflows/flows?branch=devel)
+[![License](https://img.shields.io/github/license/SNflows/flows.svg)](https://github.com/SNflows/flows/blob/devel/LICENSE)
+
+## Installation instructions
+Go to the directory where you want the Python code to be installed and simply download it or clone it via *git*:
+
+```shell
+git clone https://github.com/SNflows/flows.git
+cd flows
+```
+
+Required dependencies can be installed using the following commands. It is recommended to do this in a dedicated [virtualenv](https://virtualenv.pypa.io/en/stable/) or similar:
+
+```shell
+python3 -m venv env
+source env/bin/activate
+pip install --upgrade pip
+pip install -r requirements.txt
+```
+
+In addition, in order to run tests and do development, do:
+
+```shell
+pip install -r dev_requirements.txt  # in same virtual environment as above
+_pyver=$(find env/lib/ -type d -mindepth 1 -maxdepth 1 | cut -d '/' -f 3)
+ln -s ../env/lib/${_pyver}/site-packages/tendrils/utils/config.ini flows/config.ini
+wget -O tests/input/2020aatc/SN2020aatc_K_20201213_495s.fits.gz https://anon.erda.au.dk/share_redirect/FJGx69KFvg
+wget -O tests/input/2020lao/59000.96584_h_e_20200531_33_1_1_1_2020lao_LT_gp.fits.gz https://anon.erda.au.dk/share_redirect/E98lmqOVWf
+wget -O tests/input/2020lao/subtracted/59000.96584_h_e_20200531_33_1_1_1_2020lao_LT_gpdiff.fits.gz https://anon.erda.au.dk/share_redirect/bIxyzrRXbg
+wget -O tests/input/2021wyw/ADP.2021-10-15T11_40_06.553.fits.gz https://anon.erda.au.dk/share_redirect/Gr8p2K7ph5
+```
+
+TODO: Reformulate following bullet point and check/decide on which config paths are/should be available...:
+
+* **Changed with ``tendrils`` API.** If using ``tendrils``, follow the steps below, but then let ``tendrils`` know of the config file location. Alternatively, individual config file elements can be set programmatically using `tendrils` and will be saved to a config file automatically. The last step is to create a config file: create a file named "config.ini" and place it in the "flows" directory. Make sure that the file can only be read by you (``chmod 0600 config.ini``)! This file can contain all the settings for running the pipeline. A minimal file for working with the pipeline is
+
+```ini
+[api]
+token =
+
+[TNS]
+api_key =
+```
+
+where your API token can be found on the Flows webpage.
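+As a hedged sketch of the programmatic route (it assumes only that ``load_config()`` returns a standard ``configparser.ConfigParser``, as it is used elsewhere in this repository, and that the ``flows/config.ini`` path matches your setup):
+
+```python
+from tendrils.utils import load_config
+
+# Read the configuration known to tendrils:
+config = load_config()
+
+# Set an individual element and write the file back out;
+# the token value and the output path below are placeholders.
+config.set('api', 'token', '<my-token>')
+with open('flows/config.ini', 'w') as fid:
+    config.write(fid)
+```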
+
+## How to run tests
+
+You can test your installation by going to the root directory where you cloned the repository and running the command:
+
+```shell
+pytest
+```
+
+## Full configuration file
+
+Text coming soon...
+
+```ini
+##########################################################
+### --Tendrils configurations used at FLOWS run-time-- ###
+### Configurations without a leading '#' are required  ###
+### and must be specified by the user.                 ###
+### Configurations with a leading '#' are optional,    ###
+### and their assigned values are documentation of     ###
+### their default values in the FLOWS pipeline.        ###
+### Default values of optional configurations with a   ###
+### leading '$' signify environment variables resolved ###
+### at run-time; their fallbacks are given following   ###
+### a '/'.                                             ###
+##########################################################
+
+[api]
+# photometry_cache = None
+# pipeline = False
+token = None
+
+# casjobs:
+# wsid and password required for run_catalogs.py,
+# user registration at
+# https://galex.stsci.edu/casjobs/CreateAccount.aspx
+# wsid can be found at
+# https://galex.stsci.edu/casjobs/changedetails.aspx
+# after login
+[casjobs]
+# wsid = $CASJOBS_WSID/None
+# password = $CASJOBS_PASSWORD/None
+
+# database:
+# username and password required for run_catalogs.py,
+# the user is a registered user in the flows database
+# with access to the 'adastra' schema
+[database]
+# username = $AUDBUsername/None
+# password = $AUDBPassword/None
+
+[photometry]
+archive_local = None
+# output = .
+
+# TNS:
+# api_key required for run_querytns.py,
+# user registration at
+# https://www.wis-tns.org/user
+# api_key is that of a TNS bot; ask a flows group
+# member for one
+# if user_id and user_name are not given, fallback
+# to a TNS bot's bot_id and bot_name, which must
+# match with api_key
+[TNS]
+# api_key = None
+# bot_id = 191396
+# bot_name = AUFLOWS_BOT2
+# user_id = None
+# user_name = None
+
+[URL]
+# base_url = https://flows.phys.au.dk/api/
+# catalogs_url = reference_stars.php
+# catalogs_missing_url = catalog_missing.php
+# cleanup_photometry_status_url = cleanup_photometry_status.php
+# datafiles_url = datafiles.php
+# filters_url = filters.php
+# lightcurves_url = lightcurve.php
+# photometry_upload_url = upload_photometry.php
+# photometry_url = download_photometry.php
+# set_photometry_status_url = set_photometry_status.php
+# sites_url = sites.php
+# targets_post_url = targets_add.php
+# targets_url = targets.php
+# verify_ssl = True
+
+[ztf]
+# output_photometry = .
+```
+
+## Making a release
+
+ - Bump the semantic version in the file `VERSION` (e.g. v1.0.0) when `devel` is ready to merge: checkout `devel`, edit `VERSION`, push `devel`.
+ - Merge `devel` into `master`: create a PR from `devel` -> `master` and wait until tests pass (create issues if they do not), then merge.
+ - Create a tag on `master` matching the new semantic version: checkout `master`, pull it locally, create a tag named "v1.0.0" (or whatever the version is) using `git tag`, and push the local tag to GitHub (see the git commands sketched below).
+ - Merge `master` into `devel` to propagate the tag (create a PR on GitHub).
+ - Create a release on the GitHub releases tab once all tests pass.
diff --git a/README.rst b/README.rst
deleted file mode 100644
index 0020dd6..0000000
--- a/README.rst
+++ /dev/null
@@ -1,87 +0,0 @@
-==============
-Flows Pipeline
-==============
-.. image:: https://zenodo.org/badge/241705955.svg
-   :target: https://zenodo.org/badge/latestdoi/241705955
-.. 
image:: https://github.com/SNflows/flows/actions/workflows/tests.yml/badge.svg?branch=devel - :target: https://github.com/SNflows/flows/actions/workflows/tests.yml -.. image:: https://codecov.io/github/SNflows/flows/branch/devel/graph/badge.svg?token=H8CQGPG0U6 - :target: https://codecov.io/github/SNflows/flows -.. image:: https://hitsofcode.com/github/SNflows/flows?branch=devel - :alt: Hits-of-Code - :target: https://hitsofcode.com/view/github/SNflows/flows?branch=devel -.. image:: https://img.shields.io/github/license/SNflows/flows.svg - :alt: license - :target: https://github.com/SNflows/flows/blob/devel/LICENSE - -Installation instructions -========================= -* Go to the directory where you want the Python code to be installed and simply download it or clone it via *git* as:: - - >>> git clone https://github.com/SNflows/flows.git . - -* Required dependencies can be installed using the following command. It is recommended to do this in a dedicated `virtualenv `_ or similar: - - >>> pip install -r requirements.txt - >>> pip install -r requirements_dev.txt # for tests/development - -* **Changed with ``tendrils`` API.** If using ``tendrils``, follow the steps below, but then let ``tendrils`` know of the config file location. Alternatively, individual config file elements can be set programatically using `tendrils` and will be saved to a config file automatically. Last step is to create a config-file. Create a file named "config.ini" and place it in the "flows" directory. Make sure that the file can only be read by you (``chmod 0600 config.ini``)! - This file can contain all the settings for running the pipeline. A minimal file for working with the pipeline is - - .. code-block:: ini - - [api] - token = - - [TNS] - api_key = - - Where your API token can be found on the Flows webpage. - - -How to run tests -================ -You can test your installation by going to the root directory where you cloned the repository and run the command:: - ->>> pytest - -Full configuration file -======================= -Text coming soon... - -.. code-block:: ini - - [api] - token = - photometry_cache = - - [photometry] - archive_local = - output = - - [casjobs] - wsid = - password = - - [TNS] - api_key = - bot_id = - bot_name = - user_id = - user_name = - - [ztf] - output_photometry = - - [database] - username = - password = - -Making a release -================ - - - Bump sem-version when Devel is ready to merge in file = VERSION (v1.0.0). Checkout devel. Edit Version. Push devel. - - Merge Devel into Master (Create PR from Devel -> Master), wait until tests are passing. Create issues if not. Then Merge. - - Create tag on Master corresponding to right semversion. This means, checkout master. Pull master locally. Create tag using git tag called "v1.0.0" or whatever the sem-version. Push local tag to GitHub. - - Merge Master into devel to propagate tag (Create PR on GitHub). - - Create release on GH releases tab if all tests passing. 
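For the tagging step in the release checklist above, the corresponding git commands would look something like this (the version number is an example):

```shell
git checkout master
git pull
git tag -a v1.0.0 -m "Version 1.0.0"
git push origin v1.0.0
```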
diff --git a/fill_photometry_details.py b/fill_photometry_details.py new file mode 100644 index 0000000..ab432f8 --- /dev/null +++ b/fill_photometry_details.py @@ -0,0 +1,120 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +import os.path +import getpass +import numpy as np +from astropy.table import Table +from astropy.units import Quantity +import sys +if sys.path[0] != os.path.abspath(os.path.join(os.path.dirname(__file__), 'flows')): + sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), 'flows'))) +from flows.aadc_db import AADC_DB +from tendrils.utils import load_config + +if __name__ == '__main__': + with AADC_DB() as db: + + #db.cursor.execute("SELECT fileid,path,targetid FROM flows.files LEFT JOIN flows.photometry_details d ON d.fileid_phot=files.fileid WHERE files.datatype=2 AND d.fileid_phot IS NULL;") + # + #for row in db.cursor.fetchall(): + # + # fileid_phot = row['fileid'] + # filepath = os.path.join('/archive/photometry/', row['path']) + # + # tab = Table.read(filepath, format='ascii.ecsv') + # + # indx_raw = (tab['starid'] == 0) + # indx_sub = (tab['starid'] == -1) + # indx_ref = (tab['starid'] > 0) + # + # phot_details = { + # 'fileid_phot': fileid_phot, + # 'fileid_img': int(tab.meta['fileid']), + # 'fileid_template': tab.meta['template'], + # 'fileid_diffimg': None if 'diffimg' not in tab.meta else tab.meta['diffimg'], + # 'obstime': tab.meta['obstime-bmjd'], + # 'mag_raw': float(tab[indx_raw]['mag']), + # 'mag_raw_error': float(tab[indx_raw]['mag_error']), + # 'mag_sub': None if not any(indx_sub) else float(tab[indx_sub]['mag']), + # 'mag_sub_error': None if not any(indx_sub) else float(tab[indx_sub]['mag_error']), + # 'zeropoint': float(tab.meta['zp']), + # 'zeropoint_error': None if 'zp_error' not in tab.meta else float(tab.meta['zp_error']), + # 'zeropoint_diff': None if 'zp_diff' not in tab.meta else float(tab.meta['zp_diff']), + # 'fwhm': None if 'fwhm' not in tab.meta else tab.meta['fwhm'], + # 'seeing': None if 'seeing' not in tab.meta else tab.meta['seeing'], + # 'references_detected': int(np.sum(indx_ref)), + # 'used_for_epsf': int(np.sum(tab['used_for_epsf'])), + # 'faintest_reference_detected': float(np.max(tab[indx_ref]['mag'])), + # 'pipeline_version': tab.meta['version'], + # } + # + # for key in ('fwhm', 'seeing'): + # if isinstance(phot_details[key], Quantity): + # phot_details[key] = float(phot_details[key].value) + # + # print(phot_details) + # print(row['targetid']) + # + # db.cursor.execute("""INSERT INTO flows.photometry_details ( + # fileid_phot, + # fileid_img, + # fileid_template, + # fileid_diffimg, + # obstime_bmjd, + # mag_raw, + # mag_raw_error, + # mag_sub, + # mag_sub_error, + # zeropoint, + # zeropoint_error, + # zeropoint_diff, + # fwhm, + # seeing, + # references_detected, + # used_for_epsf, + # faintest_reference_detected, + # pipeline_version + # ) VALUES ( + # %(fileid_phot)s, + # %(fileid_img)s, + # %(fileid_template)s, + # %(fileid_diffimg)s, + # %(obstime)s, + # %(mag_raw)s, + # %(mag_raw_error)s, + # %(mag_sub)s, + # %(mag_sub_error)s, + # %(zeropoint)s, + # %(zeropoint_error)s, + # %(zeropoint_diff)s, + # %(fwhm)s, + # %(seeing)s, + # %(references_detected)s, + # %(used_for_epsf)s, + # %(faintest_reference_detected)s, + # %(pipeline_version)s + # );""", phot_details) + # + #db.conn.commit() + + db.cursor.execute("SELECT fileid,path,targetid FROM flows.files LEFT JOIN flows.photometry_details d ON d.fileid_phot=files.fileid WHERE files.datatype=2 AND d.faintest_reference_detected='NaN'::real;") + for 
row in db.cursor.fetchall(): + + fileid_phot = row['fileid'] + filepath = os.path.join('/archive/photometry/', row['path']) + + tab = Table.read(filepath, format='ascii.ecsv') + print(len(tab)) + + indx_ref = (tab['starid'] > 0) + + frd = float(np.nanmax(tab[indx_ref]['mag'])) + if np.isnan(frd): + frd = None + + print(fileid_phot, frd) + + db.cursor.execute("UPDATE flows.photometry_details SET faintest_reference_detected=%s WHERE fileid_phot=%s;", [frd, fileid_phot]) + + db.conn.commit() diff --git a/fix_filehash.py b/fix_filehash.py new file mode 100644 index 0000000..02f3e20 --- /dev/null +++ b/fix_filehash.py @@ -0,0 +1,33 @@ +import os.path +import getpass +from flows.aadc_db import AADC_DB +from flows.utilities import get_filehash +from tendrils.utils import load_config + + +if __name__ == '__main__': + config = load_config() + + with AADC_DB() as db: + + db.cursor.execute("SELECT * FROM flows.files WHERE targetid=8;") + + for row in db.cursor.fetchall(): + if row['path'].endswith('.fits.gz'): + continue + + p = row['path'].replace('.fits', '.fits.gz') + + fpath = os.path.join('/archive/raw', p) + try: + fsize = os.path.getsize(fpath) + fhash = get_filehash(fpath) + print(p) + print(fsize) + print(fhash) + + db.cursor.execute("UPDATE flows.files SET path=%s,filehash=%s,filesize=%s WHERE fileid=%s;", [p, fhash, fsize, row['fileid']]) + db.conn.commit() + #break + except FileNotFoundError: + print(f'File {fpath} not found') diff --git a/flows/aadc_db.py b/flows/aadc_db.py index a622c98..410829f 100644 --- a/flows/aadc_db.py +++ b/flows/aadc_db.py @@ -27,16 +27,16 @@ class AADC_DB(object): # pragma: no cover cursor (`psycopg2.Cursor` object): Cursor to use in database. """ - def __init__(self, username=None, password=None): + def __init__(self, username=None, password=None, host=None, dbname=None): """ - Open connection to central TASOC database. - - If ``username`` or ``password`` is not provided or ``None``, + If ``username``, ``password``, ``host`` or ``dbname`` is not provided or ``None``, the user will be prompted for them. Parameters: username (string or None, optional): Username for AADC database. password (string or None, optional): Password for AADC database. + host (string or None, optional): Host for AADC database. + dbname (string or None, optional): DBname for AADC database. 
""" config = load_config() @@ -54,8 +54,24 @@ def __init__(self, username=None, password=None): if password is None: password = getpass.getpass('Password: ') + if host is None: + host = config.get('database', 'host', fallback=os.environ.get("AUDBHost", None)) + if host is None: + default_host = 'db.adastra.lan' + host = input('Host [%s]: ' % default_host) + if host == '': + host = default_host + + if dbname is None: + dbname = config.get('database', 'dbname', fallback=os.environ.get("AUDBName", None)) + if dbname is None: + default_dbname = 'adastra' + dbname = input('Database [%s]: ' % default_dbname) + if dbname == '': + dbname = default_dbname + # Open database connection: - self.conn = psql.connect('host=10.28.0.127 user=' + username + ' password=' + password + ' dbname=db_aadc') + self.conn = psql.connect(host=host, database=dbname, user=username, password=password) self.cursor = self.conn.cursor(cursor_factory=DictCursor) def close(self): diff --git a/flows/background.py b/flows/background.py index c71d443..f59a9e1 100644 --- a/flows/background.py +++ b/flows/background.py @@ -3,7 +3,7 @@ import numpy as np from astropy.stats import SigmaClip from numpy.typing import ArrayLike -from photutils import Background2D, SExtractorBackground +from photutils.background import Background2D, SExtractorBackground from photutils.utils import calc_total_error diff --git a/flows/casjobs/casjobs.jar b/flows/casjobs/casjobs.jar deleted file mode 100644 index 94851f6..0000000 Binary files a/flows/casjobs/casjobs.jar and /dev/null differ diff --git a/flows/catalogs.py b/flows/catalogs.py index 90f52f9..915f8c1 100644 --- a/flows/catalogs.py +++ b/flows/catalogs.py @@ -4,6 +4,7 @@ .. codeauthor:: Rasmus Handberg """ +import configparser import logging import os import os.path @@ -17,13 +18,15 @@ import requests from astropy import units as u from astropy.coordinates import Angle, SkyCoord -from astropy.table import MaskedColumn, Table +from astropy.table import MaskedColumn, Table, unique from astropy.time import Time from astroquery import sdss from astroquery.simbad import Simbad from bottleneck import anynan from tendrils.utils import load_config, query_ztf_id +import mastcasjobs + from .aadc_db import AADC_DB logger = logging.getLogger(__name__) @@ -49,42 +52,15 @@ def intval(value): # -------------------------------------------------------------------------------------------------- -def configure_casjobs(overwrite=False): +def casjobs_configured(config: configparser.ConfigParser) -> bool: """ - Set up CasJobs if needed. - - Parameters: - overwrite (bool, optional): Overwrite existing configuration. Default (False) is to not - overwrite existing configuration. - - .. codeauthor:: Rasmus Handberg + Check if casjobs credentials are available. 
""" - __dir__ = os.path.dirname(os.path.realpath(__file__)) - casjobs_config = os.path.join(__dir__, 'casjobs', 'CasJobs.config') - logger.debug(",".join([casjobs_config,__dir__,os.path.realpath(__file__)])) - if os.path.isfile(casjobs_config) and not overwrite: - return - - config = load_config() wsid = config.get('casjobs', 'wsid', fallback=os.environ.get("CASJOBS_WSID", None)) passwd = config.get('casjobs', 'password', fallback=os.environ.get("CASJOBS_PASSWORD", None)) if wsid is None or passwd is None: - raise CasjobsError("CasJobs WSID and PASSWORD not in config.ini") - - try: - with open(casjobs_config, 'w') as fid: - fid.write("wsid={0:s}\n".format(wsid)) - fid.write("password={0:s}\n".format(passwd)) - fid.write("default_target=HLSP_ATLAS_REFCAT2\n") - fid.write("default_queue=1\n") - fid.write("default_days=1\n") - fid.write("verbose=false\n") - fid.write("debug=false\n") - fid.write("jobs_location=http://mastweb.stsci.edu/gcasjobs/services/jobs.asmx\n") - except: # noqa: E722, pragma: no cover - if os.path.isfile(casjobs_config): - os.remove(casjobs_config) - + raise CasjobsError("CasJobs WSID and PASSWORD not in config.ini / 'CASJOBS_WSID' and 'CASJOBS_PASSWORD' not defined as environment variables") + return True # -------------------------------------------------------------------------------------------------- def query_casjobs_refcat2(coo_centre, radius=24 * u.arcmin): @@ -114,18 +90,17 @@ def query_casjobs_refcat2(coo_centre, radius=24 * u.arcmin): results = _query_casjobs_refcat2_divide_and_conquer(coo_centre, radius=radius) # Remove duplicate entries: - _, indx = np.unique([res['starid'] for res in results], return_index=True) - results = [results[k] for k in indx] + results = unique(results, keys='starid') # Trim away anything outside radius: ra = [res['ra'] for res in results] decl = [res['decl'] for res in results] coords = SkyCoord(ra=ra, dec=decl, unit='deg', frame='icrs') sep = coords.separation(coo_centre) - results = [res for k, res in enumerate(results) if sep[k] <= radius] + mask = sep <= radius - logger.debug("Found %d unique results", len(results)) - return results + logger.debug("Found %d unique results", np.count_nonzero(mask)) + return results[mask] # -------------------------------------------------------------------------------------------------- @@ -176,55 +151,94 @@ def _query_casjobs_refcat2(coo_centre, radius=24 * u.arcmin): if isinstance(radius, (float, int)): radius *= u.deg - sql = "SELECT r.* FROM fGetNearbyObjEq({ra:f}, {dec:f}, {radius:f}) AS n INNER JOIN HLSP_ATLAS_REFCAT2.refcat2 AS r ON n.objid=r.objid ORDER BY n.distance;".format( - ra=coo_centre.ra.deg, dec=coo_centre.dec.deg, radius=Angle(radius).deg) - logger.debug(sql) - - # Make sure that CasJobs have been configured: - configure_casjobs() - - # The command to run the casjobs script: - # BEWARE: This may change in the future without warning - it has before! 
- cmd = 'java -jar casjobs.jar execute "{0:s}"'.format(sql) - - # Execute the command: - cmd = shlex.split(cmd) - directory = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'casjobs') - proc = subprocess.Popen(cmd, cwd=directory, stdout=subprocess.PIPE, universal_newlines=True) - stdout, stderr = proc.communicate() - output = stdout.split("\n") + # prepare refcat2 query to find all objects near position + casjobs_context = "HLSP_ATLAS_REFCAT2" + casjobs_personal_table = "flows" + sql = ("""SELECT r.* INTO {table:s} FROM fGetNearbyObjEq({ra:f}, {dec:f}, {radius:f}) AS n + INNER JOIN {context:s}.refcat2 AS r ON n.objid=r.objid ORDER BY n.distance;""" + .format(table=casjobs_personal_table, context=casjobs_context, + ra=coo_centre.ra.deg, dec=coo_centre.dec.deg, radius=Angle(radius).deg)) - # build list of all kois from output from the CasJobs-script: - error_thrown = False - results = [] - for line in output: - line = line.strip() - if line == '': - continue - if 'ERROR' in line: - error_thrown = True - break - - row = line.split(',') - if len(row) == 45 and row[0] != '[objid]:Integer': - results.append( - {'starid': int(row[0]), 'ra': floatval(row[1]), 'decl': floatval(row[2]), 'pm_ra': floatval(row[5]), - 'pm_dec': floatval(row[7]), 'gaia_mag': floatval(row[9]), 'gaia_bp_mag': floatval(row[11]), - 'gaia_rp_mag': floatval(row[13]), 'gaia_variability': intval(row[17]), 'g_mag': floatval(row[22]), - 'r_mag': floatval(row[26]), 'i_mag': floatval(row[30]), 'z_mag': floatval(row[34]), - 'J_mag': floatval(row[39]), 'H_mag': floatval(row[41]), 'K_mag': floatval(row[43]), }) - - if error_thrown: - error_msg = '' - for line in output: - if len(line.strip()) > 0: - error_msg += line.strip() + "\n" - - logger.debug("Error Msg: %s", error_msg) - if 'query results exceed memory limit' in error_msg.lower(): - raise CasjobsMemoryError("Query results exceed memory limit") - else: - raise CasjobsError("ERROR detected in CasJobs: " + error_msg) + config = load_config() + casjobs_configured(config) + jobs = mastcasjobs.MastCasJobs(userid=config.get('casjobs', 'wsid'), + password=config.get('casjobs', 'password'), + context=casjobs_context) + + # limited storage space at remote casjobs, drop table, if it exists, before making a new one + for table in jobs.list_tables(): + if table == casjobs_personal_table: + jobs.drop_table(casjobs_personal_table) # more graceful than drop_table_if_exists() + + # submit job, wait for it to finish and retrieve the results + job_id = jobs.submit(sql, context=casjobs_context, task_name=casjobs_personal_table) + logger.debug("Submitted query '%s' to MAST CasJobs context '%s' with task name '%s'", + sql, casjobs_context, casjobs_personal_table) + code, status = jobs.monitor(job_id, timeout=5) + logger.debug("MAST CasJobs query exited with status %d: %s", code, status) + results = jobs.get_table(casjobs_personal_table) + + # Contents of HLSP_ATLAS_REFCAT2.refcat2 data table - i.e. the columns of 'results' just + # above; from https://galex.stsci.edu/casjobs/MyDB.aspx -> HLSP_ATLAS_REFCAT2.refcat2 + # ----------------------------------------------------------------------------- + # Name Unit Data Type Size Default Value Description + # objid dimensionless bigint 8 Unique object identifier. 
+ # RA degrees float 8 RA from Gaia DR2, J2000, epoch 2015.5 + # Dec degrees float 8 Dec from Gaia DR2, J2000, epoch 2015.5 + # plx mas real 4 Parallax from Gaia DR2 + # dplx mas real 4 Parallax uncertainty + # pmra mas/yr real 4 Proper motion in RA from Gaia DR2 + # dpmra mas/yr real 4 Proper motion uncertainty in RA + # pmdec mas/yr real 4 Proper motion in Dec from Gaia DR2 + # dpmdec mas/yr real 4 Proper motion uncertainty in Dec + # Gaia mag real 4 Gaia DR2 G magnitude + # dGaia mag real 4 Gaia DR2 G magnitude uncertainty + # BP mag real 4 Gaia G_BP magnitude + # dBP mag real 4 Gaia G_BP magnitude uncertainty + # RP mag real 4 Gaia G_RP magnitude + # dRP mag real 4 Gaia G_RP magnitude uncertainty + # Teff [K] int 4 Gaia stellar effective temperature + # AGaia mag real 4 Gaia estimate of G-band extinction for this star + # dupvar dimensionless int 4 Gaia flags coded as CONSTANT (0), VARIABLE (1), or NOT_AVAILABLE (2) + 4*DUPLICATE + # Ag mag real 4 SFD estimate of total column g-band extinction + # rp1 arc seconds real 4 Radius where cumulative G flux exceeds 0.1x this star + # r1 arc seconds real 4 Radius where cumulative G flux exceeds 1x this star + # r10 arc seconds real 4 Radius where cumulative G flux exceeds 10x this star + # g mag real 4 Pan-STARRS g_P1 magnitude + # dg mag real 4 Pan-STARRS g_P1 magnitude uncertainty + # gchi dimensionless real 4 chi^2/DOF for contributors to g + # gcontrib dimensionless int 4 Bitmap of contributing catalogs to g + # r mag real 4 Pan-STARRS r_P1 magnitude + # dr mag real 4 Pan-STARRS r_P1 magnitude uncertainty + # rchi dimensionless real 4 chi^2/DOF for contributors to r + # rcontrib dimensionless int 4 Bitmap of contributing catalogs to r + # i mag real 4 Pan-STARRS i_P1 magnitude + # di mag real 4 Pan-STARRS i_P1 magnitude uncertainty + # ichi dimensionless real 4 chi^2/DOF for contributors to i + # icontrib dimensionless int 4 Bitmap of contributing catalogs to i + # z mag real 4 Pan-STARRS z_P1 magnitude + # dz mag real 4 Pan-STARRS z_P1 magnitude uncertainty + # zchi dimensionless real 4 chi^2/DOF for contributors to z + # zcontrib dimensionless int 4 Bitmap of contributing catalogs to z + # nstat dimensionless int 4 Count of griz deweighted outliers + # J mag real 4 2MASS J magnitude + # dJ mag real 4 2MASS J magnitude uncertainty + # H mag real 4 2MASS H magnitude + # dH mag real 4 2MASS H magnitude uncertainty + # K mag real 4 2MASS K magnitude + # dK mag real 4 2MASS K magnitude uncertainty + + # remove unused columns from results + results.remove_columns(['plx', 'dplx', 'dpmra', 'dpmdec', 'dGaia', 'dBP', 'dRP', 'Teff', 'AGaia', 'Ag', + 'rp1', 'r1', 'r10', 'dg', 'gchi', 'gcontrib', 'dr', 'rchi', 'rcontrib', + 'di', 'ichi', 'icontrib', 'dz', 'zchi', 'zcontrib', 'nstat', 'dJ', 'dH', 'dK']) + + # rename columns to match older java-based implementation which had different naming conventions + # TODO: refactor dependent functions to expect the default namings and get rid of these renamings + # to start: comment out the renamings below and see where the flows pipeline breaks, then fix it + results = Table(results, + names=('starid', 'ra', 'decl', 'pm_ra', 'pm_dec', 'gaia_mag', 'gaia_bp_mag', 'gaia_rp_mag', + 'gaia_variability', 'g_mag', 'r_mag', 'i_mag', 'z_mag', 'J_mag', 'H_mag', 'K_mag')) if not results: raise CasjobsError("Could not find anything on CasJobs") @@ -298,7 +312,9 @@ def query_sdss(coo_centre, radius=24 * u.arcmin, dr=16, clean=True): if isinstance(radius, (float, int)): radius *= u.deg - #SDSS.MAX_CROSSID_RADIUS = radius + 1 
* u.arcmin + # from astroquery version 0.4.7 onwards, radius is limited to 3 arcmins - use this value if necessary + radius = radius if radius < sdss.SDSS.MAX_CROSSID_RADIUS else sdss.SDSS.MAX_CROSSID_RADIUS + sdss.conf.skyserver_baseurl = sdss.conf.skyserver_baseurl.replace("http://","https://") AT_sdss = sdss.SDSS.query_region(coo_centre, photoobj_fields=['type', 'clean', 'ra', 'dec', 'psfMag_u'], data_release=dr, timeout=600, radius=radius) @@ -469,10 +485,10 @@ def query_all(coo_centre, radius=24 * u.arcmin, dist_cutoff=2 * u.arcsec): # REFCAT results table does not have uBV N = len(AT_results) - d = np.full(N, np.NaN) - AT_results.add_column(MaskedColumn(name='B_mag', unit='mag', dtype='float32', fill_value=np.NaN, data=d)) - AT_results.add_column(MaskedColumn(name='V_mag', unit='mag', dtype='float32', fill_value=np.NaN, data=d)) - AT_results.add_column(MaskedColumn(name='u_mag', unit='mag', dtype='float32', fill_value=np.NaN, data=d)) + d = np.full(N, np.nan) + AT_results.add_column(MaskedColumn(name='B_mag', unit='mag', dtype='float32', fill_value=np.nan, data=d)) + AT_results.add_column(MaskedColumn(name='V_mag', unit='mag', dtype='float32', fill_value=np.nan, data=d)) + AT_results.add_column(MaskedColumn(name='u_mag', unit='mag', dtype='float32', fill_value=np.nan, data=d)) # Query APASS around the target position: results_apass = query_apass(coo_centre, radius=radius) diff --git a/flows/coordinatematch/coordinatematch.py b/flows/coordinatematch/coordinatematch.py index 7b6360a..2dbba8d 100644 --- a/flows/coordinatematch/coordinatematch.py +++ b/flows/coordinatematch/coordinatematch.py @@ -8,7 +8,11 @@ import numpy as np import time from itertools import count, islice, chain, product, zip_longest -from astropy.coordinates.angle_utilities import angular_separation +# In astropy version 6.0 the deprecated (since version 4.3) astropy.coordinates.angle_utilities is removed; +# astropy.coordinates.{,angle_formats} is to be used instead. 
+# See https://docs.astropy.org/en/v6.0.0/changelog.html +# from astropy.coordinates.angle_utilities import angular_separation +from astropy.coordinates import angular_separation from astropy.coordinates import SkyCoord import astropy.wcs from scipy.spatial import cKDTree as KDTree diff --git a/flows/image.py b/flows/image.py index 17d9cd8..824404b 100644 --- a/flows/image.py +++ b/flows/image.py @@ -80,7 +80,7 @@ def create_wcs(self) -> WCS: def create_masked_image(self) -> None: """Warning: this is destructive and will overwrite image data setting masked values to NaN""" - self.image[self.mask] = np.NaN + self.image[self.mask] = np.nan self.clean = np.ma.masked_array(data=self.image, mask=self.mask, copy=False) def set_edge_rows_to_value(self, y: Tuple[float] = None, value: Union[int, float, np.float64] = 0) -> None: diff --git a/flows/magnitudes.py b/flows/magnitudes.py index c0244ff..c2253a5 100644 --- a/flows/magnitudes.py +++ b/flows/magnitudes.py @@ -60,7 +60,7 @@ def instrumental_mag(tab: Table, target: Target, make_fig: bool = False) -> Tupl if n_weights > 1: zp_error = np.sqrt(n_weights * nansum(weights * (y - best_fit(x)) ** 2) / nansum(weights) / (n_weights - 1)) else: - zp_error = np.NaN + zp_error = np.nan logger.info('Leastsquare ZP = %.3f, ZP_error = %.3f', zp, zp_error) # Determine sigma clipping sigma according to Chauvenet method diff --git a/flows/photometry.py b/flows/photometry.py index 86918a2..81e0f2b 100644 --- a/flows/photometry.py +++ b/flows/photometry.py @@ -27,8 +27,17 @@ from .result_model import ResultsTable warnings.simplefilter('ignore', category=AstropyDeprecationWarning) -from photutils import CircularAperture, CircularAnnulus, aperture_photometry # noqa: E402 -from photutils.psf import EPSFFitter, BasicPSFPhotometry, DAOGroup, extract_stars # noqa: E402 +from photutils.aperture import CircularAperture, CircularAnnulus, aperture_photometry # noqa: E402 + + +# In photutils version 1.13 the deprecated (since version 1.9) BasicPSFPhotometry is removed; +# PSFPhotometry is to be used instead. +# In photutils version 1.13 the deprecated (since version 1.9) DAOGroup is removed; +# SourceGrouper is to be used instead. 
+# See https://photutils.readthedocs.io/en/stable/changelog.html +# from photutils.psf import EPSFFitter, BasicPSFPhotometry, DAOGroup, extract_stars # noqa: E402 +from photutils.psf import EPSFFitter, PSFPhotometry, SourceGrouper, extract_stars + from photutils.background import MedianBackground # noqa: E402 import photutils # noqa: E402 @@ -108,10 +117,18 @@ def make_epsf(self, stars): """ # Build ePSF logger.info("Building ePSF...") + + # from photutils docs: recentering_boxsize must have odd values and be greater than or equal to 3 + rcb = int(np.round(2 * self.fwhm)) + rcb = rcb if rcb % 2 else rcb + 1 + # from photutils docs: fit_boxsize must have odd values + fb = int(np.round(1.5 * self.fwhm)) + fb = fb if fb % 2 else fb + 1 + builder = self.epsf_builder( oversampling=1, shape=1 * self.star_size, - fitter=EPSFFitter(fit_boxsize=max(int(np.round(1.5 * self.fwhm)), 5)), - recentering_boxsize=max(int(np.round(2 * self.fwhm)), 5), + fitter=EPSFFitter(fit_boxsize=max(fb, 5)), + recentering_boxsize=max(rcb, 5), norm_radius=max(self.fwhm, 5), maxiters=100, progress_bar=multiprocessing.parent_process() is None and logger.getEffectiveLevel() <= 20 ) @@ -125,7 +142,7 @@ def make_epsf(self, stars): class Photometry: - def __init__(self, photometry_obj: Optional[BasicPSFPhotometry] = None): + def __init__(self, photometry_obj: Optional[PSFPhotometry] = None): self.photometry_obj = photometry_obj @staticmethod @@ -152,16 +169,32 @@ def apphot(self, coordinates: ArrayLike, image: FlowsImage, fwhm: float, use_raw def create_photometry_object(self, fwhm: Union[float, u.Quantity], psf_model: photutils.psf.EPSFModel, fitsize: Union[int, Tuple[int]], fitter: Callable = fitting.LevMarLSQFitter(), bkg: PhotutilsBackground = MedianBackground()): - self.photometry_obj = BasicPSFPhotometry(group_maker=DAOGroup(fwhm), bkg_estimator=bkg, psf_model=psf_model, - fitter=fitter, fitshape=fitsize, aperture_radius=fwhm) - - def psfphot(self, image: ArrayLike, init_table: Table) -> Tuple[BasicPSFPhotometry, Table]: + # 2024-09 erik: + # photutils version 1.1.0 (or possibly lower?): + # self.photometry_obj = PSFPhotometry(group_maker=SourceGrouper(fwhm), bkg_estimator=bkg, psf_model=psf_model, + # fitter=fitter, fitshape=fitsize, aperture_radius=fwhm) + # It seems that 'group_maker' -> 'grouper', 'bkg_estimator' -> 'localbkg_estimator', 'fitshape' -> 'fit_shape' + # in newer photutils versions. + # In addition, localbkg_estimator requires a LocalBackground object which in turn requires us to specify an + # inner and an outer radius, which we have not been doing previously... Comments/suggestions, anyone? + self.photometry_obj = PSFPhotometry(psf_model=psf_model, fit_shape=fitsize, grouper=SourceGrouper(fwhm), + fitter=fitter, + localbkg_estimator=photutils.background.LocalBackground(inner_radius=4*fwhm, + outer_radius=7*fwhm, + bkg_estimator=bkg), + aperture_radius=fwhm) + + def psfphot(self, image: ArrayLike, init_table: Table) -> Tuple[PSFPhotometry, Table]: """PSF photometry on init guesses table/row. """ if self.photometry_obj is None: raise ValueError('Photometry object not initialized.') # logger.info(f"{init_table}") - output: Table = self.photometry_obj(image=image, init_guesses=init_table) + # 2024-09 erik: + # 'image' and 'init_guesses' not recognised. + # Trying to supply image as implicit 'data' and doing 'init_guesses' -> 'init_params'. 
+ # output: Table = self.photometry_obj(image=image, init_guesses=init_table) + output: Table = self.photometry_obj(image, init_params=init_table) return self.photometry_obj, output @staticmethod @@ -173,7 +206,9 @@ def rescale_flux_error(phot_tables: Dict[int, Table], for fit_shape, row in phot_tables.items(): row = row[0] if select_first_row else row - new_err = row['flux_unc'] / exptime + # 'flux_unc' -> 'flux_err' at some version transition of photutils (not documented in changelog, + # found with debugger). + new_err = row['flux_err'] / exptime new_flux = row['flux_fit'] / exptime if new_flux <= flux + flux_err: return fit_shape, new_err @@ -316,7 +351,7 @@ def diff_psf_phot(self) -> Table: init_table=self.init_guesses_diff.init_guess_diff) return psf_tbl - def rescale_uncertainty(self, psfphot_tbl: Table, dynamic: bool = True, + def rescale_uncertainty(self, psfphot_tbl: Table, dynamic: bool = True, static_fwhm: float = 2.5, epsilon_mag: float = 0.004, ensure_greater: bool = True): """ @@ -331,7 +366,7 @@ def rescale_uncertainty(self, psfphot_tbl: Table, dynamic: bool = True, static_fwhm : float FWHM multiple to use incase dynamic fails or don't want to use it. Default 2.5 determined empirically. epsilon_mag : float - Small magnitude change within which new and old uncertainties are considered the same. + Small magnitude change within which new and old uncertainties are considered the same. Should be smaller than ~1/2 the expected uncertainty. """ # Rescale psf errors from fit iteratively @@ -346,7 +381,7 @@ def rescale_uncertainty(self, psfphot_tbl: Table, dynamic: bool = True, if self.diff_im_exists: _table = self.diff_psf_phot() _table = self.raw_psf_phot(self.init_guesses.init_guess_target) - if "flux_unc" in _table.colnames: + if "flux_err" in _table.colnames: _phot_tables_dict[fitshape] = _table if len(_phot_tables_dict) == 0: @@ -355,7 +390,7 @@ def rescale_uncertainty(self, psfphot_tbl: Table, dynamic: bool = True, else: # Find the fit shape elbow: flux = psfphot_tbl[0]['flux_fit'] - flux_err = psfphot_tbl[0]['flux_unc'] + flux_err = psfphot_tbl[0]['flux_err'] exptime = self.image.exptime dynamic_fit_shape, new_err = self.photometry.rescale_flux_error(_phot_tables_dict, flux, flux_err, exptime) @@ -365,13 +400,13 @@ def rescale_uncertainty(self, psfphot_tbl: Table, dynamic: bool = True, logger.info(f"Recalculating all reference uncertainties using new fitsize:" f" {fit_shape} pixels, ({fit_shape/self.fwhm if dynamic else static_fwhm :.2} * FWHM).") psfphot_tbl_rescaled = self.psfphot(fit_shape) - if psfphot_tbl['flux_unc'][0] > psfphot_tbl_rescaled['flux_unc'][0] + epsilon_mag and ensure_greater: + if psfphot_tbl['flux_err'][0] > psfphot_tbl_rescaled['flux_err'][0] + epsilon_mag and ensure_greater: logger.info("Recalculated uncertainties were smaller than original and ``ensure_greater`` was True:" "Not using rescaled uncertainties for the SN.") - psfphot_tbl['flux_unc'][1:] = psfphot_tbl_rescaled['flux_unc'][1:] + psfphot_tbl['flux_err'][1:] = psfphot_tbl_rescaled['flux_err'][1:] return psfphot_tbl - psfphot_tbl['flux_unc'] = psfphot_tbl_rescaled['flux_unc'] + psfphot_tbl['flux_err'] = psfphot_tbl_rescaled['flux_err'] return psfphot_tbl def make_result_table(self, psfphot_tbl: Table, apphot_tbl: Table): @@ -441,9 +476,9 @@ def do_phot(fileid: int, cm_timeout: Optional[float] = None, make_plots: bool = # Do photometry apphot_tbl = pm.apphot() # Verify uncertainty exists after PSF phot: - psfphot_tbl = ResultsTable.verify_uncertainty_column(pm.psfphot()) + psfphot_tbl = 
ResultsTable.verify_uncertainty_column(pm.psfphot()) if rescale: # Rescale uncertainties - psfphot_tbl = pm.rescale_uncertainty(psfphot_tbl, dynamic=rescale_dynamic) + psfphot_tbl = pm.rescale_uncertainty(psfphot_tbl, dynamic=rescale_dynamic) # Build results table and calculate magnitudes pm.make_result_table(psfphot_tbl, apphot_tbl) diff --git a/flows/reference_cleaning.py b/flows/reference_cleaning.py index 78fb1c7..e6e9214 100644 --- a/flows/reference_cleaning.py +++ b/flows/reference_cleaning.py @@ -25,7 +25,15 @@ from bottleneck import nanmedian, nansum, nanmean, replace from scipy.spatial import KDTree import pandas as pd # TODO: Convert to pure numpy implementation -import sep +# From https://github.com/PJ-Watson/sep-pjw: +# "The original release of sep by Kyle Barbary no longer appears to be maintained. +# [...] The aim of sep-pjw is to offer a version of sep that resolves [support] issues, +# whilst maintaining compatibility as much as is feasibly possible. [...] +# For existing workflows, the only necessary update will be to change the import to [sep_pjw]" +#import sep +import sep_pjw + + from .image import FlowsImage from .target import Target from .utilities import create_logger @@ -46,8 +54,8 @@ class References: xy: Optional[RefTable] = None def replace_nans_pm(self) -> None: - replace(self.table['pm_ra'], np.NaN, 0.) - replace(self.table['pm_dec'], np.NaN, 0.) + replace(self.table['pm_ra'], np.nan, 0.) + replace(self.table['pm_dec'], np.nan, 0.) def make_sky_coords(self, reference_time: float = 2015.5) -> None: self.replace_nans_pm() @@ -97,9 +105,9 @@ def add_target(self, target: Target, starid: int = 0) -> None: def use_sep(image: FlowsImage, tries: int = 5, thresh: float = 5.): # Use sep to for soure extraction - sep_background = sep.Background(image.image, mask=image.mask) + sep_background = sep_pjw.Background(image.image, mask=image.mask) try: - objects = sep.extract(image.image - sep_background, thresh=thresh, err=sep_background.globalrms, + objects = sep_pjw.extract(image.image - sep_background, thresh=thresh, err=sep_background.globalrms, mask=image.mask, deblend_cont=0.1, minarea=9, clean_param=2.0) except KeyboardInterrupt as e: raise e @@ -153,9 +161,9 @@ def force_reject_g2d(xarray: ArrayLike, yarray: ArrayLike, image: Union[NDArray, # Stars reject N = len(xarray) - fwhms = np.full((N, 2), np.NaN) - xys = np.full((N, 2), np.NaN) - rsqs = np.full(N, np.NaN) + fwhms = np.full((N, 2), np.nan) + xys = np.full((N, 2), np.nan) + rsqs = np.full(N, np.nan) for i, (x, y) in enumerate(zip(xarray, yarray)): x = int(np.round(x)) y = int(np.round(y)) @@ -177,8 +185,8 @@ def force_reject_g2d(xarray: ArrayLike, yarray: ArrayLike, image: Union[NDArray, nan_filter = nan_filter & np.isfinite(curr_star) if len(curr_star[nan_filter]) < 3: # Not enough pixels to fit logger.debug(f"Not enough pixels to fit star, curr_star[nan_filter]:{curr_star[nan_filter]}") - rsqs[i] = np.NaN - fwhms[i] = np.NaN + rsqs[i] = np.nan + fwhms[i] = np.nan continue gfit = gfitter(g2d, x=xpos[nan_filter], y=ypos[nan_filter], z=curr_star[nan_filter]) diff --git a/flows/result_model.py b/flows/result_model.py index 8e8b740..b177989 100644 --- a/flows/result_model.py +++ b/flows/result_model.py @@ -68,17 +68,19 @@ def make_results_table(cls, ref_table: Table, apphot_tbl: Table, psfphot_tbl: Ta results_table['flux_aperture'] = apphot_tbl['flux_aperture'] / image.exptime results_table['flux_aperture_error'] = apphot_tbl['flux_aperture_error'] / image.exptime results_table['flux_psf'] = psfphot_tbl['flux_fit'] / 
image.exptime - results_table['flux_psf_error'] = psfphot_tbl['flux_unc'] / image.exptime + results_table['flux_psf_error'] = psfphot_tbl['flux_err'] / image.exptime results_table['pixel_column_psf_fit'] = psfphot_tbl['x_fit'] results_table['pixel_row_psf_fit'] = psfphot_tbl['y_fit'] - results_table['pixel_column_psf_fit_error'] = psfphot_tbl['x_0_unc'] - results_table['pixel_row_psf_fit_error'] = psfphot_tbl['y_0_unc'] + # 'x_0_unc' -> 'x_err' and 'y_0_unc' -> 'y_err' at some version transition of photutils (not documented in + # changelog, found with debugger). + results_table['pixel_column_psf_fit_error'] = psfphot_tbl['x_err'] + results_table['pixel_row_psf_fit_error'] = psfphot_tbl['y_err'] return results_table @staticmethod def verify_uncertainty_column(tab): - if "flux_unc" in tab.colnames: + if "flux_err" in tab.colnames: return tab - tab['flux_unc'] = tab['flux_fit'] * 0.04 # Assume 4% errors + tab['flux_err'] = tab['flux_fit'] * 0.04 # Assume 4% errors logger.warning("Flux uncertainty not found from PSF fit, assuming 4% error.") diff --git a/flows/target.py b/flows/target.py index 25902c7..e98b63c 100644 --- a/flows/target.py +++ b/flows/target.py @@ -7,6 +7,8 @@ from numpy.typing import NDArray from tendrils import api +from flows.aadc_db import AADC_DB + @dataclass class Target: @@ -75,3 +77,17 @@ def from_tid(cls, target_id: int) -> 'Target': return cls( ra=target_pars['ra'], dec=target_pars['decl'], name=target_pars['target_name'], id=target_pars['targetid']) + + @classmethod + def from_tname(cls, target_name: str) -> 'Target': + # TODO: better to define a function 'api.get_target(target_name)', avoiding db connection... + + """ + Create target from target name. + """ + with AADC_DB() as db: + # SELECT fileid,path,targetid FROM flows.files LEFT JOIN flows.photometry_details d ON d.fileid_phot=files.fileid WHERE files.datatype=2 AND d.fileid_phot IS NULL; + db.cursor.execute("select targetid from flows.targets WHERE target_name=%s;", [target_name]) + for row in db.cursor.fetchall(): + target_id = row['targetid'] + return cls.from_tid(target_id) diff --git a/flows/visibility.py b/flows/visibility.py index 5c99faf..2be8130 100644 --- a/flows/visibility.py +++ b/flows/visibility.py @@ -16,9 +16,10 @@ import astropy.units as u import matplotlib.pyplot as plt import numpy as np -from astropy.coordinates import AltAz, SkyCoord, get_moon, get_sun +from astropy.coordinates import AltAz, SkyCoord, get_body, get_sun from astropy.time import Time from astropy.visualization import quantity_support +from matplotlib import pyplot from matplotlib.dates import DateFormatter from tendrils import api @@ -49,7 +50,7 @@ def visibility(target: Target, siteid: Optional[int] = None, date=None, output=N if date is None: date = datetime.utcnow() elif isinstance(date, str): - date = datetime.strptime(date, '%Y-%m-%d') + date = datetime.strptime(date, '%Y%m%d') # Coordinates of object: obj = SkyCoord(ra=target.ra, dec=target.dec, unit='deg', frame='icrs') @@ -68,6 +69,8 @@ def visibility(target: Target, siteid: Optional[int] = None, date=None, output=N plotpath = os.path.join(output, "visibility_%s_%s_site%02d.png" % ( target.name, date.strftime('%Y%m%d'), site['siteid'])) else: + if not os.path.exists(os.path.dirname(output)): + os.makedirs(os.path.dirname(output)) plotpath = output logger.debug("Will save visibility plot to '%s'", plotpath) @@ -95,7 +98,7 @@ def visibility(target: Target, siteid: Optional[int] = None, date=None, output=N # The Sun and Moon: altaz_sun = 
get_sun(times).transform_to(AltAzFrame) - altaz_moon = get_moon(times).transform_to(AltAzFrame) + altaz_moon = get_body("moon", times).transform_to(AltAzFrame) sundown_astro = (altaz_sun.alt < -6 * u.deg) if np.any(sundown_astro): @@ -110,7 +113,7 @@ def visibility(target: Target, siteid: Optional[int] = None, date=None, output=N plt.grid(ls=':', lw=0.5) ax.plot(times.datetime, altaz_sun.alt, color='y', label='Sun') ax.plot(times.datetime, altaz_moon.alt, color=[0.75] * 3, ls='--', label='Moon') - objsc = ax.scatter(times.datetime, altaz_obj.alt, c=altaz_obj.az, label=target.name, lw=0, s=8, + objsc = ax.scatter(times.datetime, altaz_obj.alt.deg, c=altaz_obj.az.deg, label=target.name, lw=0, s=8, cmap='twilight') ax.fill_between(times.datetime, 0 * u.deg, 90 * u.deg, altaz_sun.alt < -0 * u.deg, color='0.5', zorder=0) # , label='Night' @@ -139,5 +142,6 @@ def visibility(target: Target, siteid: Optional[int] = None, date=None, output=N if output: return plotpaths - plt.show() + if pyplot.isinteractive(): + plt.show() return ax diff --git a/flows_create_visibility_plots.py b/flows_create_visibility_plots.py new file mode 100644 index 0000000..9854517 --- /dev/null +++ b/flows_create_visibility_plots.py @@ -0,0 +1,159 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +import argparse +import logging +import os +import datetime +import shutil +import glob +import re +import requests +import urllib3 +import ssl +import sys +from tqdm import tqdm +if os.path.abspath(os.path.join(os.path.dirname(__file__), 'flows')) not in sys.path: + sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), 'flows'))) +import flows +from flows.target import Target +from flows.aadc_db import AADC_DB +from tendrils import api + +#--------- +# https://stackoverflow.com/questions/66689696/urllib3-error-ssl-wrong-signature-type +class SslOldHttpAdapter(requests.adapters.HTTPAdapter): + def init_poolmanager(self, connections, maxsize, block=False): + ctx = ssl.create_default_context() + ctx.set_ciphers('DEFAULT@SECLEVEL=1') + self.poolmanager = urllib3.poolmanager.PoolManager( + ssl_version=ssl.PROTOCOL_TLS, + ssl_context=ctx) + +#--------- +if __name__ == '__main__': + + # Parse command line arguments: + parser = argparse.ArgumentParser(description='Download ZTF photometry.') + parser.add_argument('-d', '--debug', help='Print debug messages.', action='store_true') + parser.add_argument('-q', '--quiet', help='Only report warnings and errors.', action='store_true') + parser.add_argument('--fast', action='store_true') + args = parser.parse_args() + + # Set logging level: + logging_level = logging.INFO + if args.quiet: + logging_level = logging.WARNING + elif args.debug: + logging_level = logging.DEBUG + + # Setup logging: + formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s') + console = logging.StreamHandler() + console.setFormatter(formatter) + logger = logging.getLogger('flows') + if not logger.hasHandlers(): + logger.addHandler(console) + logger.setLevel(logging_level) + + today = datetime.datetime.utcnow() + tomorrow = datetime.datetime.utcnow() + datetime.timedelta(hours=24) + yesterday = datetime.datetime.utcnow() - datetime.timedelta(hours=24) + + # Automatically reject targets that are more than 2 months old: + + + + with AADC_DB() as db: + db.cursor.execute("""UPDATE flows.targets SET target_status='rejected' WHERE + target_status='candidate' + AND inserted < (NOW() at time zone 'utc') - '3 months'::interval;""") # TODO: Change to last_modified! 
+ n_rejected = db.cursor.rowcount + db.conn.commit() + logger.info("%d targets automatically rejected.", n_rejected) + + # Sort the targets by when they were inserted: + targets = api.get_targets() + targets = sorted(targets, key=lambda x: x['inserted'])[::-1] + + tqdm_settings = {'disable': None if logger.isEnabledFor(logging.INFO) else True} + + # Start requests session and allow older SSL settings + # since sdss.org is not that modern: + ses = requests.Session() + ses.mount('https://skyserver.sdss.org', SslOldHttpAdapter()) + + regex_year = re.compile(r'^(\d+)') + for tgt in tqdm(targets, **tqdm_settings): + + m = regex_year.match(tgt['target_name']) + year = int(m.group(1)) + + outdir = os.path.join('/archive/candidates', str(year), tgt['target_name']) + + # If running in fast mode, jump over everything + if args.fast and (tgt['inserted'] < yesterday or os.path.isdir(outdir)): + continue + + if tgt['target_status'] not in ('candidate', 'target'): + # Cleanup any leftover directories: + if os.path.isdir(outdir): + logger.info("Deleting old directory for %s...", tgt['target_name']) + shutil.rmtree(outdir) + continue + + logger.info("Creating plots for %s...", tgt['target_name']) + os.makedirs(outdir, exist_ok=True) + + tgtobj = Target.from_tid(tgt['targetid']) + + # FIXME: Limiting to only siteid=2 (VLT) right now! + flows.visibility(target=tgtobj, siteid=2, date=today.strftime('%Y%m%d'), output=outdir, overwrite=False) + + # Create plots for tomorrow already: + flows.visibility(target=tgtobj, siteid=2, date=tomorrow.strftime('%Y%m%d'), output=outdir, overwrite=False) + + regex_fname = re.compile(r'^visibility_%s_(\d+)_site\d+\.png$' % tgt['target_name']) + for f in glob.iglob(os.path.join(outdir, 'visibility_*.png')): + bname = os.path.basename(f) + + m = regex_fname.match(bname) + if m: + file_time = datetime.datetime.strptime(m.group(1), '%Y%m%d') + file_age = file_time - today + + if file_age < datetime.timedelta(hours=-48): + logger.info("Deleting old file: %s", bname) + os.remove(f) + + else: + logger.warning("Deleting non-matching file: %s", f) + os.remove(f) + + #------------------------------------------------------------------------------------------ + + filename = tgt['target_name'] + '_sdss_thumb.jpg' + filepath = os.path.join(outdir, filename) + if not os.path.isfile(filepath): + logger.info("Downloading SDSS thumbnail for %s...", tgt['target_name']) + params = { + 'ra': tgt['ra'], + 'dec': tgt['decl'], + 'scale': 0.8, # 1.79224, + 'width': 150, + 'height': 150, + 'opt': 'GI' + } + + r = ses.get('https://skyserver.sdss.org/dr16/SkyServerWS/ImgCutout/getjpeg', + params=params, stream=True) + if r.status_code == 404: # This is allowed! 
+ if os.path.isfile(filepath): + os.remove(filepath) + else: + r.raise_for_status() + + # Save the image to disk: + with open(filepath, 'wb') as fd: + for chunk in r.iter_content(chunk_size=128): + fd.write(chunk) diff --git a/flows_daily_candidate_email.py b/flows_daily_candidate_email.py new file mode 100644 index 0000000..5e14e13 --- /dev/null +++ b/flows_daily_candidate_email.py @@ -0,0 +1,165 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +# TODO: Imported as is/was from zion:/usr/users/kasoc/flows/ (zion.phys.au.dk: 10.28.0.245), +# so will need modifications + +import argparse +import logging +import os +import datetime +#import getpass +import smtplib +from email.mime.multipart import MIMEMultipart +from email.mime.text import MIMEText +from astropy.coordinates import Angle +import sys +if '/usr/users/kasoc/Preprocessing/' not in sys.path: + sys.path.insert(0, '/usr/users/kasoc/Preprocessing/') +#sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), 'flows'))) +from kasocutil import psql_connect # 2024-12-19 erik: no trace of this module in '/usr/users/kasoc' ... +#import flows + +if __name__ == '__main__': + + # Parse command line arguments: + parser = argparse.ArgumentParser(description='Send out candidate e-mails.') + parser.add_argument('-d', '--debug', help='Print debug messages.', action='store_true') + parser.add_argument('-q', '--quiet', help='Only report warnings and errors.', action='store_true') + args = parser.parse_args() + + # Set logging level: + logging_level = logging.INFO + if args.quiet: + logging_level = logging.WARNING + elif args.debug: + logging_level = logging.DEBUG + + # Setup logging: + formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s') + console = logging.StreamHandler() + console.setFormatter(formatter) + logger = logging.getLogger(__name__) + if not logger.hasHandlers(): + logger.addHandler(console) + logger.setLevel(logging_level) + + #passwd = getpass.getpass('Password: ') + + with psql_connect('flows_notifier') as conn: + cursor = conn.cursor() + + cursor.execute("SELECT * FROM flows.targets WHERE target_status='candidate' AND included_in_email IS NULL ORDER BY target_name;") + results = cursor.fetchall() + + if results: + logger.info("%d new candidates found.", len(results)) + + now = datetime.datetime.utcnow() + + html = """\ + + + + + """ + html += "

<p>Flows, Aarhus University<br>\n" + now.strftime('%d %B %Y %H:%M') + "</p>";
+ html += "<p>Dear Flows members,</p>"
+
+ html += "<p>The following candidates have been automatically added to the Flows candidate list:</p>"
+ html += '<table border="1">'
+ html += "<tr>"
+ html += '<th>Candidate</th>'
+ html += '<th>RA</th>'
+ html += '<th>Dec</th>'
+ html += '<th>Redshift</th>'
+ html += '<th>Discovery mag.</th>'
+ html += '<th>Discovery date</th>'
+ html += "</tr>"
+ html += "\n"
+ for row in results:
+     print(row)
+
+     html += "<tr>"
+     html += '<td>{1:s}</td>'.format(row['targetid'], row['target_name'])
+     html += '<td>{0}</td>'.format(Angle(row['ra']/15, unit='deg').to_string(sep=':', precision=1))
+     html += '<td>{0}</td>'.format(Angle(row['decl'], unit='deg').to_string(sep=':', alwayssign=True, precision=1))
+     html += '<td>{0:.3f}</td>'.format(row['redshift'])
+     if row['discovery_mag'] is None:
+         html += '<td></td>'
+     else:
+         html += '<td>{0:.2f}</td>'.format(row['discovery_mag'])
+     if row['discovery_date'] is None:
+         html += '<td></td>'
+     else:
+         html += '<td>{0}</td>'.format(row['discovery_date'].strftime('%Y-%m-%d %H:%M'))
+     html += "</tr>\n"
+ html += "</table>"
+
+ html += "<p>Best regards,<br>The Flows Team</p>"
+ #html += "<p>If you no longer wish to receive these e-mails, go to 'My Account' on the TASOC website to disable e-mail notifications about upcoming events.</p>"
+ html += "</body></html>
" + html += "" + html += "" + + #recipients = ['rasmush@phys.au.dk'] + recipients = ['flows-work.phys@maillist.au.dk'] + + # Send an e-mail that the file is ready for download: + # Create message container - the correct MIME type is multipart/alternative. + msg = MIMEMultipart('alternative') + msg['Subject'] = "New Flows candidates" + msg['From'] = "Flows " + msg['To'] = ', '.join(recipients) + + #msg.attach(MIMEText(text, 'plain')) + msg.attach(MIMEText(html, 'html')) + + # Send the message via local SMTP server. + s = smtplib.SMTP('localhost') + # sendmail function takes 3 arguments: sender's address, recipient's address + # and message to send - here it is sent as one string. + s.sendmail(msg['From'], recipients, msg.as_string()) + s.quit() + logger.info("E-mail sent!") + + for row in results: + cursor.execute("UPDATE flows.targets SET included_in_email=%s WHERE targetid=%s;", [now, row['targetid']]) + conn.commit() + logger.info("Targets updated.") + + else: + logger.warning("No new candidates to broadcast") + + cursor.close() + + logger.info("Done.") diff --git a/flows_mwmap.py b/flows_mwmap.py new file mode 100644 index 0000000..1e0974d --- /dev/null +++ b/flows_mwmap.py @@ -0,0 +1,127 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +""" + +.. codeauthor:: Simon Holmbo +.. codeauthor:: Rasmus Handberg +""" + +import os.path +import sys +import tempfile +import shutil +import subprocess +import numpy as np +from scipy import interpolate +import matplotlib.pyplot as plt +from matplotlib.patches import Rectangle +from astropy.io import fits +from astropy.coordinates import SkyCoord +import healpy +if os.path.abspath(os.path.join(os.path.dirname(__file__), 'flows')) not in sys.path: + sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), 'flows'))) +from tendrils import api +from tendrils.utils import urls_from_config +from flows.plots import plots_noninteractive +import urllib.request + +#-------------------------------------------------------------------------------------------------- +def optipng(fpath): + subprocess.run(['optipng', '-preserve', '-quiet', fpath]) + +#-------------------------------------------------------------------------------------------------- +def healpix_to_cartesian(data): + fig = plt.figure(0) + healpy.cartview(data, fig=0, nest=True) + data = fig.axes[0].images[0].get_array() + fig.clf() + plt.close(fig) + return data + +#-------------------------------------------------------------------------------------------------- +if __name__ == '__main__': + # Shorthands used later on: + __dir__ = os.path.dirname(os.path.abspath(__file__)) + + # Switch on non-interactive backend: + plots_noninteractive() + + # download milkyway data if it does not exist locally + filename = os.path.join(__dir__, "flows_mwmap_milkyway.fits") + if not os.path.exists(filename): + mwfile = urllib.request.urlretrieve(url='https://anon.erda.au.dk/share_redirect/fnsq9iKMmp', filename=filename) + + # Load milky-way data: + print("loading mw data:", filename) + with fits.open(filename, mode='readonly') as hdul: + data = hdul[1].data["temperature"] + + data = healpix_to_cartesian(data) + LON, LAT = np.meshgrid(np.linspace(180, -180, data.shape[1]) % 360, np.linspace(-90, 90, data.shape[0])) + RADEC = SkyCoord(LON, LAT, frame="galactic", unit="degree") + RA, DEC = RADEC.icrs.ra.degree, RADEC.icrs.dec.degree + ra, dec = np.meshgrid(np.linspace(180, -180, data.shape[1]) % 360, np.linspace(-90, 90, data.shape[0])) + data = interpolate.griddata(list(zip(RA.ravel(), 
diff --git a/flows_mwmap.py b/flows_mwmap.py
new file mode 100644
index 0000000..1e0974d
--- /dev/null
+++ b/flows_mwmap.py
@@ -0,0 +1,127 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+
+.. codeauthor:: Simon Holmbo
+.. codeauthor:: Rasmus Handberg
+"""
+
+import os.path
+import sys
+import tempfile
+import shutil
+import subprocess
+import numpy as np
+from scipy import interpolate
+import matplotlib.pyplot as plt
+from matplotlib.patches import Rectangle
+from astropy.io import fits
+from astropy.coordinates import SkyCoord
+import healpy
+if os.path.abspath(os.path.join(os.path.dirname(__file__), 'flows')) not in sys.path:
+    sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), 'flows')))
+from tendrils import api
+from tendrils.utils import urls_from_config
+from flows.plots import plots_noninteractive
+import urllib.request
+
+#--------------------------------------------------------------------------------------------------
+def optipng(fpath):
+    subprocess.run(['optipng', '-preserve', '-quiet', fpath])
+
+#--------------------------------------------------------------------------------------------------
+def healpix_to_cartesian(data):
+    fig = plt.figure(0)
+    healpy.cartview(data, fig=0, nest=True)
+    data = fig.axes[0].images[0].get_array()
+    fig.clf()
+    plt.close(fig)
+    return data
+
+#--------------------------------------------------------------------------------------------------
+if __name__ == '__main__':
+    # Shorthands used later on:
+    __dir__ = os.path.dirname(os.path.abspath(__file__))
+
+    # Switch on non-interactive backend:
+    plots_noninteractive()
+
+    # download milkyway data if it does not exist locally
+    filename = os.path.join(__dir__, "flows_mwmap_milkyway.fits")
+    if not os.path.exists(filename):
+        mwfile = urllib.request.urlretrieve(url='https://anon.erda.au.dk/share_redirect/fnsq9iKMmp', filename=filename)
+
+    # Load milky-way data:
+    print("loading mw data:", filename)
+    with fits.open(filename, mode='readonly') as hdul:
+        data = hdul[1].data["temperature"]
+
+    data = healpix_to_cartesian(data)
+    LON, LAT = np.meshgrid(np.linspace(180, -180, data.shape[1]) % 360, np.linspace(-90, 90, data.shape[0]))
+    RADEC = SkyCoord(LON, LAT, frame="galactic", unit="degree")
+    RA, DEC = RADEC.icrs.ra.degree, RADEC.icrs.dec.degree
+    ra, dec = np.meshgrid(np.linspace(180, -180, data.shape[1]) % 360, np.linspace(-90, 90, data.shape[0]))
+    data = interpolate.griddata(list(zip(RA.ravel(), DEC.ravel())), data.ravel(), list(zip(ra.ravel(), dec.ravel()))).reshape(data.shape)
+
+    # FLOWS
+    urls = urls_from_config()
+    print("loading flows data:", urls.targets_url)
+    dtype = {"names": ("name", "ra", "dec", "redshift"), "formats": ("U10", 'f', 'f', 'f')}
+    targets = np.array([(obj["target_name"], obj["ra"], obj["decl"], obj["redshift"]) for obj in api.get_targets() if obj["project"] == "flows" and obj["target_status"] == "target"], dtype=dtype)
+
+    print("plotting data...", np.size(targets, axis=0), "targets")
+    fig = plt.figure(figsize=(8, 4))
+    ax = plt.subplot(projection="mollweide")
+    X, Y = np.meshgrid(np.linspace(np.pi, -np.pi, data.shape[1]), np.linspace(-np.pi/2, np.pi/2, data.shape[0]))
+    vmin, vmax = np.nanquantile(np.log(data), (.01, .99))
+    ax.pcolormesh(X, Y, np.log(data), cmap="gray_r", vmin=vmin, vmax=vmax, shading="auto", rasterized=True)
+
+    ra = (np.deg2rad(targets["ra"]) + np.pi) % (2*np.pi) - np.pi
+    dec = np.deg2rad(targets["dec"])
+
+    vmin, vmax = 0.00, 0.10  # np.nanquantile(targets["redshift"], (.01, .99))
+    points = ax.scatter(ra, dec, s=10, c=targets["redshift"], cmap="Reds", vmin=vmin, vmax=vmax, zorder=2)
+
+    for x in (-150, -120, -90, -60, -30, 0, 30, 60, 90, 120, 150):
+        ax.text(np.deg2rad(x), 0, fr"{x}$\degree$", color="black", ha="center", va="center", zorder=3)
+        ax.add_patch(Rectangle((np.deg2rad(x)-.2, -.05), .4, .1325, color="white", linewidth=0, alpha=.65, zorder=2))
+    ax.set_xticklabels([])
+
+    ax.grid(color="black", ls="dotted", lw=.5, zorder=1)
+
+    N = 1000
+
+    cbar = ax.inset_axes([0.5, 0.0, 0.6, 1.0], zorder=-1)
+    X, Y = np.meshgrid(*map(np.arange, (N, N)))
+    Z = -np.arctan2(Y - N/2, X - N/2)
+    Z[Z <= -np.pi/4] = np.nan
+    Z[Z >= +np.pi/4] = np.nan
+    Z[(X - N/2)**2 + (Y - N/2)**2 <= (N/2.9)**2] = np.nan
+    Z[(X - N/2)**2 + (Y - N/2)**2 >= (N/2.5)**2] = np.nan
+    cbar.imshow(Z, cmap="Reds", aspect="auto", interpolation="nearest")
+
+    xy = [N/2 + N/2.5 * np.array([np.cos(t), np.sin(t)]) for t in np.linspace(-np.pi/4, np.pi/4, N//5)]
+    xy += [N/2 + N/2.9 * np.array([np.cos(t), np.sin(t)]) for t in np.linspace(+np.pi/4, -np.pi/4, N//5)] + [xy[0]]
+    cbar.plot(*zip(*xy), marker=None, linewidth=1, color="black")
+
+    for i, t in enumerate(np.linspace(-np.pi/4, +np.pi/4, 9+10+2)[1:-1]):
+        s = 0.1 if not (i + 1) % 2 else 0.05
+        xy = [N/2 + N/2.5 * np.array([np.cos(t), np.sin(t)])]
+        xy += [N/2 + N/(2.5+s) * np.array([np.cos(t), np.sin(t)])]
+        cbar.plot(*zip(*xy), marker=None, linewidth=1, color="black")
+
+    cbar.text(N/2 + N/2.7 * np.cos(+np.pi/3.25), N/2 + N/2.7 * np.sin(+np.pi/3.25), f"z = {vmin:.2f}", va="top")
+    cbar.text(N/2 + N/2.7 * np.cos(-np.pi/3.25), N/2 + N/2.7 * np.sin(-np.pi/3.25), f"z = {vmax:.2f}", va="bottom")
+    cbar.axis("off")
+
+    # Work in temp directory:
+    with tempfile.TemporaryDirectory() as tmpdir:
+        # Save figure:
+        fpath = os.path.join(tmpdir, 'mwmap.png')
+        fig.savefig(fpath, bbox_inches='tight', transparent=True)
+
+        # Optimize the generated image:
+        optipng(fpath)
+
+        # Overwrite existing file:
+        shutil.move(fpath, os.path.join(__dir__, "mwmap.png"))
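The `healpix_to_cartesian()` helper above rasterizes the HEALPix map by drawing it with `healpy.cartview` and reading the image array back out of the figure. Recent healpy releases (including the `healpy >= 1.17.3` pinned below) can return the projected map directly; a minimal sketch of that alternative, with a hypothetical function name:

```python
import healpy
import matplotlib.pyplot as plt

def healpix_to_cartesian_direct(data):
    # nest=True matches the ordering assumed by healpix_to_cartesian() above.
    proj = healpy.cartview(data, nest=True, return_projected_map=True)
    plt.close('all')  # cartview still draws into a figure; discard it
    return proj
```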
diff --git a/notes/LCOGT_Reduce_SingleStandards.ipynb b/notes/LCOGT_Reduce_SingleStandards.ipynb
index f161ec0..5a88e69 100644
--- a/notes/LCOGT_Reduce_SingleStandards.ipynb
+++ b/notes/LCOGT_Reduce_SingleStandards.ipynb
@@ -214,10 +214,10 @@
 "from astropy.nddata import NDData\n",
 "from astropy.modeling.fitting import LevMarLSQFitter\n",
 "\n",
-"from photutils import CircularAperture, CircularAnnulus\n",
+"from photutils.aperture import CircularAperture, CircularAnnulus, aperture_photometry\n",
 "from photutils.psf import extract_stars\n",
-"from photutils import aperture_photometry, EPSFBuilder, EPSFFitter\n",
-"from photutils.psf import BasicPSFPhotometry, DAOGroup\n",
+"from photutils.psf import EPSFBuilder, EPSFFitter\n",
+"from photutils.psf import PSFPhotometry, SourceGrouper\n",
 "from photutils.background import MMMBackground\n",
 "\n",
 "from imexam.imexamine import Imexamine\n",
@@ -543,8 +543,8 @@
 "\n",
 "coordinates.psf = list()\n",
 "\n",
-"photometry = BasicPSFPhotometry(\n",
-"    group_maker = DAOGroup(FWHM),\n",
+"photometry = PSFPhotometry(\n",
+"    grouper = SourceGrouper(min_separation=FWHM),\n",
 "    bkg_estimator = MMMBackground(),\n",
 "    psf_model = image.epsf,\n",
 "    fitter = LevMarLSQFitter(),\n",
@@ -562,7 +562,7 @@
 "        dict(x_0=coordinates.centered_x[i], y_0=coordinates.centered_y[i])\n",
 "    ])\n",
 "    )\n",
-"    coordinates.psf.append((res['flux_fit'].data[0], res['flux_unc'].data[0]))\n",
+"    coordinates.psf.append((res['flux_fit'].data[0], res['flux_err'].data[0]))\n",
 "\n",
 "if verbose: \n",
 "    print('Psf Phot Success')\n",
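Note that the notebook hunk above swaps in `PSFPhotometry`, but the unchanged context lines still pass `bkg_estimator` in the old `BasicPSFPhotometry` style; in photutils >= 1.13, `PSFPhotometry` requires a `fit_shape` argument and handles backgrounds via `localbkg_estimator`, so the cell will need a follow-up edit. A self-contained sketch of the new API on synthetic data — the PSF model, star position, `fit_shape` and radii are illustrative choices, not values from the notebook:

```python
import numpy as np
from astropy.table import Table
from astropy.modeling.fitting import LevMarLSQFitter
from photutils.psf import PSFPhotometry, SourceGrouper, IntegratedGaussianPRF
from photutils.background import MMMBackground, LocalBackground

FWHM = 3.0
psf_model = IntegratedGaussianPRF(sigma=FWHM / 2.355)  # stand-in for image.epsf

# Tiny synthetic frame with a single star at (12, 15):
yy, xx = np.mgrid[0:32, 0:32]
data = psf_model.evaluate(xx, yy, flux=1000.0, x_0=12.0, y_0=15.0, sigma=FWHM / 2.355)

photometry = PSFPhotometry(
    psf_model,
    fit_shape=(11, 11),
    grouper=SourceGrouper(min_separation=FWHM),
    fitter=LevMarLSQFitter(),
    localbkg_estimator=LocalBackground(5, 10, MMMBackground()),
    aperture_radius=FWHM,
)
# x_0/y_0 are accepted aliases for the x_init/y_init initial-guess columns:
res = photometry(data, init_params=Table({'x_0': [12.0], 'y_0': [15.0]}))
print(res['flux_fit'][0], res['flux_err'][0])
```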
diff --git a/requirements.txt b/requirements.txt
index 05dd931..6f82d1e 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,21 +1,23 @@
-numpy == 1.23.5
-scipy >= 1.5.4
-astropy == 5.0.4
-photutils == 1.1.0
-Bottleneck >= 1.3.4
-matplotlib >= 3.3.1
-mplcursors >= 0.3
-seaborn
-pandas
-requests
-PyYAML
-psycopg2-binary
-jplephem
-scikit-image >= 0.17.2
-tqdm
-pytz
-sep
-astroalign > 2.3
-networkx
-astroquery == 0.4.6
-tendrils >= 0.1.5
+numpy >= 2.1.1
+scipy >= 1.14.1
+astropy >= 6.1.3
+photutils >= 1.13.0
+Bottleneck >= 1.4.0
+matplotlib >= 3.9.2
+mplcursors >= 0.5.3
+seaborn >= 0.13.2
+pandas >= 2.2.2
+requests >= 2.32.3
+PyYAML >= 6.0.2
+psycopg2-binary >= 2.9.9
+jplephem >= 2.22
+scikit-image >= 0.24.0
+tqdm >= 4.66.5
+pytz >= 2024.2
+sep-pjw >= 1.3.5
+astroalign >= 2.5.1
+networkx >= 3.3
+astroquery >= 0.4.7
+tendrils >= 0.3.2
+mastcasjobs >= 0.0.6
+healpy >= 1.17.3
diff --git a/requirements.txt.erik b/requirements.txt.erik
new file mode 100644
index 0000000..553ca0f
--- /dev/null
+++ b/requirements.txt.erik
@@ -0,0 +1,21 @@
+numpy-2.1.1
+scipy-1.14.1
+astropy-6.1.3
+photutils-1.13.0
+bottleneck-1.4.0
+matplotlib-3.9.2
+mplcursors-0.5.3
+seaborn-0.13.2
+pandas-2.2.2
+requests-2.32.3
+pyyaml-6.0.2
+psycopg2-binary-2.9.9
+jplephem-2.22
+scikit_image-0.24.0
+tqdm-4.66.5
+pytz-2024.2
+sep_pjw-1.3.5
+astroalign-2.5.1
+networkx-3.3
+astroquery-0.4.7
+tendrils-0.3.2
diff --git a/run_catalogs.py b/run_catalogs.py
index 5e283d1..e230ff3 100644
--- a/run_catalogs.py
+++ b/run_catalogs.py
@@ -14,7 +14,9 @@ def parse():
     parser = argparse.ArgumentParser(description='Run catalog.')
     parser.add_argument('-d', '--debug', help='Print debug messages.', action='store_true')
     parser.add_argument('-q', '--quiet', help='Only report warnings and errors.', action='store_true')
-    parser.add_argument('-t', '--target', type=str, help='Target to print catalog for.', nargs='?', default=None)
+    parser.add_argument('-c', '--commit', help='Commit downloaded catalog(s) to flows database.', action='store_true')
+    parser.add_argument('-p', '--print', help='Print single catalog which already exists in flows database.', action='store_true')
+    parser.add_argument('-t', '--target', type=str, help='Optionally specify just one target for downloading/committing/printing.', nargs='?', default=None)
     return parser.parse_args()

 def set_logging_level(args):
@@ -40,19 +42,27 @@ def main():
     logger.addHandler(console)
     logger.setLevel(logging_level)

-    # Get missing
-    for target in api.get_catalog_missing():
-        logger.info("Downloading catalog for target=%s...", target)
-        download_catalog(target)  # @TODO: refactor to Tendrils
-
-    # download target catalog for printing
+    targets = api.get_catalog_missing()
     if args.target is not None:
-        cat = api.get_catalog(args.target)
-
-        print(f"Target:{cat['target'].pprint_all()} "
-              f"\nReferences: {cat['references'].pprint_all()} "
-              f"\nAvoid:cat['avoid'].pprint_all()")
-
+        if not args.print:
+            # We want to download and possibly commit the target catalog
+            if int(args.target) in targets:
+                logger.info("Downloading catalog for target=%s (committing to db=%s)...", args.target, args.commit)
+                download_catalog(args.target, update_existing=args.commit)  # @TODO: refactor to Tendrils
+            else:
+                logger.warning("Cannot find target=%s in list generated by api.get_catalog_missing()", args.target)
+        else:
+            # Download target catalog from db for printing
+            cat = api.get_catalog(args.target)
+            print(f"Target:{cat['target'].pprint_all()} "
+                  f"\nReferences: {cat['references'].pprint_all()} "
+                  f"\nAvoid: {cat['avoid'].pprint_all()}")
+    else:
+        # Download and possibly commit all missing target catalogs
+        logger.info("%d catalogs missing", len(targets))
+        for target in targets:
+            logger.info("Downloading catalog for target=%s...", target)
+            download_catalog(target, update_existing=args.commit)  # @TODO: refactor to Tendrils

 if __name__ == '__main__':
     main()
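For reference, the batch behaviour the new flags enable boils down to the loop below. This is a sketch only: it assumes `download_catalog` is importable from `flows.catalogs` (the tests further down import the module as `from flows import catalogs`), and that `update_existing` mirrors the `--commit` flag as in the script above.

```python
from tendrils import api
from flows import catalogs  # import location assumed from tests/test_catalogs.py

def download_missing(commit: bool = False) -> None:
    # Download every catalog the API reports as missing;
    # update_existing mirrors the --commit flag above.
    for target in api.get_catalog_missing():
        catalogs.download_catalog(target, update_existing=commit)

download_missing(commit=False)  # dry run
```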
diff --git a/run_download_ztf.py b/run_download_ztf.py
index c711c31..f7652b2 100644
--- a/run_download_ztf.py
+++ b/run_download_ztf.py
@@ -20,6 +20,7 @@ def main():
     parser.add_argument('-q', '--quiet', help='Only report warnings and errors.', action='store_true')
     parser.add_argument('-t', '--target', type=str, default=None, help='Target to download ZTF photometry for.')
     parser.add_argument('-o', '--output', type=str, default=None, help='Directory to save output to.')
+    parser.add_argument('-m', '--missing', help='Download ZTF photometry for all missing targets/candidates.', action='store_true')
     args = parser.parse_args()

     # Set logging level:
@@ -62,10 +63,14 @@
     # Loop through targets:
     for tgt in targets:
+        # only interested in candidates and targets
+        if tgt['target_status'] == 'rejected':
+            continue
+
         logger.debug("Target: %s", tgt)
         target_name = tgt['target_name']

-        # Paths to the files to be updated:
+        # Paths to the output files:
         ztf_lightcurve_path = os.path.join(output_dir, f'{target_name:s}-ztf.ecsv')
         ztf_plot_path = os.path.join(output_dir, f'{target_name:s}-ztf.png')

@@ -78,41 +83,49 @@
                 os.remove(ztf_plot_path)
             continue

+        if args.missing:
+            if os.path.isfile(ztf_lightcurve_path) and os.path.isfile(ztf_plot_path):
+                continue
+
         # Download ZTF photometry as Astropy Table:
-        tab = ztf.download_ztf_photometry(tgt['targetid'])
-        logger.debug("ZTF Photometry:\n%s", tab)
-        if tab is None or len(tab) == 0:
-            if os.path.isfile(ztf_lightcurve_path):
-                os.remove(ztf_lightcurve_path)
-            if os.path.isfile(ztf_plot_path):
-                os.remove(ztf_plot_path)
+        try:
+            tab = ztf.download_ztf_photometry(tgt['targetid'])
+            logger.debug("ZTF Photometry:\n%s", tab)
+            if tab is None or len(tab) == 0:
+                if os.path.isfile(ztf_lightcurve_path):
+                    os.remove(ztf_lightcurve_path)
+                if os.path.isfile(ztf_plot_path):
+                    os.remove(ztf_plot_path)
+                continue
+
+            # Write table to file:
+            tab.write(ztf_lightcurve_path, format='ascii.ecsv', delimiter=',', overwrite=True)  # TODO: overwrite=True has not always been necessary, do we want to overwrite or not?
+
+            # Find time of maximum and 14 days from that:
+            indx_min = np.argmin(tab['mag'])
+            maximum_mjd = tab['time'][indx_min]
+            fortnight_mjd = maximum_mjd + 14
+
+            # Get LC data out and save as CSV files
+            fig, ax = plt.subplots()
+            ax.axvline(maximum_mjd, ls='--', c='k', lw=0.5, label='Maximum')
+            ax.axvline(fortnight_mjd, ls='--', c='0.5', lw=0.5, label='+14 days')
+            for fid in np.unique(tab['photfilter']):
+                col = colors[fid]
+                band = tab[tab['photfilter'] == fid]
+                ax.errorbar(band['time'], band['mag'], band['mag_err'], color=col, ls='-', lw=0.5, marker='.', label=fid)
+
+            ax.invert_yaxis()
+            ax.set_title(target_name)
+            ax.set_xlabel('Time (MJD)')
+            ax.set_ylabel('Magnitude')
+            ax.legend()
+            fig.savefig(ztf_plot_path, format='png', bbox_inches='tight')
+            plt.close(fig)
+        except Exception:
+            logger.exception("Failed to process ZTF photometry for target=%s", target_name)
             continue

-        # Write table to file:
-        tab.write(ztf_lightcurve_path, format='ascii.ecsv', delimiter=',')
-
-        # Find time of maxmimum and 14 days from that:
-        indx_min = np.argmin(tab['mag'])
-        maximum_mjd = tab['time'][indx_min]
-        fortnight_mjd = maximum_mjd + 14
-
-        # Get LC data out and save as CSV files
-        fig, ax = plt.subplots()
-        ax.axvline(maximum_mjd, ls='--', c='k', lw=0.5, label='Maximum')
-        ax.axvline(fortnight_mjd, ls='--', c='0.5', lw=0.5, label='+14 days')
-        for fid in np.unique(tab['photfilter']):
-            col = colors[fid]
-            band = tab[tab['photfilter'] == fid]
-            ax.errorbar(band['time'], band['mag'], band['mag_err'], color=col, ls='-', lw=0.5, marker='.', label=fid)
-
-        ax.invert_yaxis()
-        ax.set_title(target_name)
-        ax.set_xlabel('Time (MJD)')
-        ax.set_ylabel('Magnitude')
-        ax.legend()
-        fig.savefig(ztf_plot_path, format='png', bbox_inches='tight')
-        plt.close(fig)
-
 # --------------------------------------------------------------------------------------------------
 if __name__ == '__main__':
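On the `overwrite=True` TODO above: `astropy.table.Table.write` raises `OSError` if the output file already exists unless `overwrite=True` is passed, so overwriting is what makes re-runs over an existing output directory succeed. A minimal demonstration (the file name is illustrative):

```python
import os
from astropy.table import Table

tab = Table({'time': [59000.96584], 'mag': [18.2], 'mag_err': [0.05]})
tab.write('lightcurve.ecsv', format='ascii.ecsv', delimiter=',', overwrite=True)
tab.write('lightcurve.ecsv', format='ascii.ecsv', delimiter=',', overwrite=True)  # second write succeeds
os.remove('lightcurve.ecsv')  # clean up the demonstration file
```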
db.cursor.execute("SELECT logid,dont_ingest_again FROM flows.uploadlog WHERE uploadpath=%s;", [os.path.relpath(fpath, rootdir_inbox)]) row = db.cursor.fetchone() if row is not None: uploadlogid = row['logid'] + if row['dont_ingest_again']: + logger.info("Skipping file") + continue else: uploadlogid = None logger.info("Uploadlog ID: %s", uploadlogid) @@ -144,8 +149,7 @@ def ingest_from_inbox(): # Only accept FITS file, or already compressed FITS files: if not fpath.endswith('.fits') and not fpath.endswith('.fits.gz'): if uploadlogid: - db.cursor.execute("UPDATE flows.uploadlog SET status='Invalid file type' WHERE logid=%s;", - [uploadlogid]) + db.cursor.execute("UPDATE flows.uploadlog SET status='Invalid file type',dont_ingest_again=TRUE WHERE logid=%s;", [uploadlogid]) db.conn.commit() logger.error("Invalid file type: %s", os.path.relpath(fpath, rootdir_inbox)) continue @@ -296,31 +300,51 @@ def ingest_from_inbox(): except Exception as e: # pragma: no cover logger.exception("Could not load FITS image") if uploadlogid: - errmsg = str(e) if hasattr(e, 'message') else str(e.message) + errmsg = str(e) if not hasattr(e, 'message') else str(e.message) db.cursor.execute("UPDATE flows.uploadlog SET status=%s WHERE logid=%s;", ['Load Image Error: ' + errmsg, uploadlogid]) db.conn.commit() continue # Use the WCS in the file to calculate the pixel-positon of the target: + with warnings.catch_warnings(): + warnings.filterwarnings('ignore', category=RuntimeWarning, message='All-NaN slice encountered') try: target_pixels = img.wcs.all_world2pix(target_radec, 0).flatten() except: # noqa: E722, pragma: no cover logger.exception("Could not find target position using the WCS.") if uploadlogid: errmsg = "Could not find target position using the WCS." - db.cursor.execute("UPDATE flows.uploadlog SET status=%s WHERE logid=%s;", [errmsg, uploadlogid]) + db.cursor.execute("UPDATE flows.uploadlog SET status=%s,dont_ingest_again=TRUE WHERE logid=%s;", [errmsg, uploadlogid]) db.conn.commit() continue # Check that the position of the target actually falls within # the pixels of the image: - if target_pixels[0] < -0.5 or target_pixels[1] < -0.5 or target_pixels[0] > img.shape[1] - 0.5 or \ - target_pixels[1] > img.shape[0] - 0.5: + logger.debug("image.shape = %s", img.shape) + if len(img.shape) != 2: + errmsg = "Invalid image dimensions." + logger.error(errmsg) + if uploadlogid: + db.cursor.execute("UPDATE flows.uploadlog SET status=%s,dont_ingest_again=TRUE WHERE logid=%s;", [errmsg, uploadlogid]) + db.conn.commit() + continue + + logger.debug("target_pixels = %s", target_pixels) + if len(target_pixels) != 2 or np.any(~np.isfinite(target_pixels)): + errmsg = "Invalid extracted target position. Check the WCS." + logger.error(errmsg) + if uploadlogid: + db.cursor.execute("UPDATE flows.uploadlog SET status=%s,dont_ingest_again=TRUE WHERE logid=%s;", [errmsg, uploadlogid]) + db.conn.commit() + continue + + if target_pixels[0] < -0.5 or target_pixels[1] < -0.5 \ + or target_pixels[0] > img.shape[1]-0.5 or target_pixels[1] > img.shape[0]-0.5: logger.error("Target position does not fall within image. Check the WCS.") if uploadlogid: errmsg = "Target position does not fall within image. Check the WCS." - db.cursor.execute("UPDATE flows.uploadlog SET status=%s WHERE logid=%s;", [errmsg, uploadlogid]) + db.cursor.execute("UPDATE flows.uploadlog SET status=%s,dont_ingest_again=TRUE WHERE logid=%s;", [errmsg, uploadlogid]) db.conn.commit() continue @@ -348,10 +372,13 @@ def ingest_from_inbox(): # with the central value. 
             obstime = img.obstime.utc.mjd
             if inputtype != 'replace':
-                db.cursor.execute(
-                    "SELECT fileid FROM flows.files WHERE targetid=%s AND datatype=%s AND site=%s AND photfilter=%s AND obstime BETWEEN %s AND %s;",
-                    [targetid, datatype, img.site['siteid'], img.photfilter, obstime - 0.5 * img.exptime / 86400,
-                        obstime + 0.5 * img.exptime / 86400, ])
+                db.cursor.execute("SELECT fileid FROM flows.files WHERE targetid=%s AND datatype=%s AND site=%s AND photfilter=%s AND obstime BETWEEN %s AND %s;", [
+                    targetid,
+                    datatype,
+                    img.site['siteid'],
+                    img.photfilter,
+                    float(obstime - 0.5 * img.exptime/86400),
+                    float(obstime + 0.5 * img.exptime / 86400), ])
                 if db.cursor.fetchone() is not None:
                     logger.error("ALREADY DONE: Deep check")
                     if uploadlogid:
@@ -368,19 +395,27 @@
             # Set file and directory permissions:
             # TODO: Can this not be handled in a more elegant way?
-            os.chmod(os.path.dirname(newpath), 0o2750)
-            os.chmod(newpath, 0o0440)
+            os.chmod(os.path.dirname(newpath), 0o2775)
+            os.chmod(newpath, 0o0444)

             filesize = os.path.getsize(fpath)

             if not fpath.endswith('-e00.fits'):
                 create_plot(newpath, target_coord=target_coord, target_position=target_pixels)

-            db.cursor.execute(
-                "INSERT INTO flows.files (archive,path,targetid,datatype,site,filesize,filehash,obstime,photfilter,exptime,version,available) VALUES (%(archive)s,%(relpath)s,%(targetid)s,%(datatype)s,%(site)s,%(filesize)s,%(filehash)s,%(obstime)s,%(photfilter)s,%(exptime)s,%(version)s,1) RETURNING fileid;",
-                {'archive': archive, 'relpath': relpath, 'targetid': targetid, 'datatype': datatype,
-                    'site': img.site['siteid'], 'filesize': filesize, 'filehash': filehash, 'obstime': obstime,
-                    'photfilter': img.photfilter, 'exptime': img.exptime, 'version': version})
+            db.cursor.execute("""INSERT INTO flows.files (
+                    archive,path,targetid,datatype,site,filesize,filehash,obstime,photfilter,exptime,version,available
+                )
+                VALUES (
+                    %(archive)s,%(relpath)s,%(targetid)s,%(datatype)s,%(site)s,%(filesize)s,%(filehash)s,%(obstime)s,%(photfilter)s,%(exptime)s,%(version)s,1
+                )
+                RETURNING fileid;""", {
+                'archive': archive,
+                'relpath': relpath,
+                'targetid': targetid,
+                'datatype': datatype,
+                'site': img.site['siteid'], 'filesize': filesize, 'filehash': filehash, 'obstime': float(obstime),
+                'photfilter': img.photfilter, 'exptime': float(img.exptime), 'version': version})
             fileid = db.cursor.fetchone()[0]

             if datatype == 4:
@@ -395,11 +430,24 @@
                     [fileid, uploadlogid])
                 db.conn.commit()

-        except:  # noqa: E722, pragma: no cover
+
+        except (KeyboardInterrupt, SystemExit):
+            db.conn.rollback()
+            if os.path.exists(newpath):
+                os.remove(newpath)
+            logger.warning("Stopped by user or system")
+            return
+
+        except Exception as e:  # noqa: E722, pragma: no cover
             db.conn.rollback()
             if os.path.exists(newpath):
                 os.remove(newpath)
-            raise
+            logger.exception("%s: Could not ingest file", fpath)
+            if uploadlogid:
+                msg = str(e) if not hasattr(e, 'message') else str(e.message)
+                db.cursor.execute("UPDATE flows.uploadlog SET status=%s WHERE logid=%s;", [msg, uploadlogid])
+                db.conn.commit()
+
         else:
             logger.info("DELETE THE ORIGINAL FILE")
             if os.path.isfile(newpath):
@@ -411,8 +459,8 @@
 def ingest_photometry_from_inbox():
-    rootdir_inbox = '/flows/inbox'
-    rootdir_archive = '/flows/archive_photometry'
+    rootdir_inbox = '/archive/inbox'
+    rootdir_archive = '/archive/photometry'

     logger = logging.getLogger(__name__)
@@ -536,6 +584,7 @@
             # Optimize all the PNG files in the temp directory:
             for f in glob.iglob(os.path.join(tmpdir, '*.png')):
+                logger.debug("Running optipng on %s", f)
                 optipng(f)

             # Copy the full directory to its new home:
@@ -546,9 +595,12 @@
             filesize = os.path.getsize(newpath)
             filehash = get_filehash(newpath)

-            db.cursor.execute(
-                "INSERT INTO flows.files (archive,path,targetid,datatype,site,filesize,filehash,obstime,photfilter,version,available) VALUES (%(archive)s,%(relpath)s,%(targetid)s,%(datatype)s,%(site)s,%(filesize)s,%(filehash)s,%(obstime)s,%(photfilter)s,%(version)s,1) RETURNING fileid;",
-                {'archive': archive, 'relpath': relpath, 'targetid': targetid, 'datatype': 2, 'site': site,
+            db.cursor.execute("INSERT INTO flows.files (archive,path,targetid,datatype,site,filesize,filehash,obstime,photfilter,version,available) VALUES (%(archive)s,%(relpath)s,%(targetid)s,%(datatype)s,%(site)s,%(filesize)s,%(filehash)s,%(obstime)s,%(photfilter)s,%(version)s,1) RETURNING fileid;", {
+                'archive': archive,
+                'relpath': relpath,
+                'targetid': targetid,
+                'datatype': 2,
+                'site': site,
                 'filesize': filesize, 'filehash': filehash, 'obstime': tab.meta['obstime-bmjd'],
                 'photfilter': tab.meta['photfilter'], 'version': new_version})
             fileid = db.cursor.fetchone()[0]
@@ -585,91 +637,91 @@
                 'pipeline_version': tab.meta['version'], 'latest_version': new_version}

             db.cursor.execute("""INSERT INTO flows.photometry_details (
-                fileid_phot,
-                fileid_img,
-                fileid_template,
-                fileid_diffimg,
-                obstime_bmjd,
-                mag_raw,
-                mag_raw_error,
-                mag_sub,
-                mag_sub_error,
-                zeropoint,
-                zeropoint_error,
-                zeropoint_diff,
-                fwhm,
-                seeing,
-                references_detected,
-                used_for_epsf,
-                faintest_reference_detected,
-                pipeline_version
-            ) VALUES (
-                %(fileid_phot)s,
-                %(fileid_img)s,
-                %(fileid_template)s,
-                %(fileid_diffimg)s,
-                %(obstime)s,
-                %(mag_raw)s,
-                %(mag_raw_error)s,
-                %(mag_sub)s,
-                %(mag_sub_error)s,
-                %(zeropoint)s,
-                %(zeropoint_error)s,
-                %(zeropoint_diff)s,
-                %(fwhm)s,
-                %(seeing)s,
-                %(references_detected)s,
-                %(used_for_epsf)s,
-                %(faintest_reference_detected)s,
-                %(pipeline_version)s
-            );""", phot_summary)
+                    fileid_phot,
+                    fileid_img,
+                    fileid_template,
+                    fileid_diffimg,
+                    obstime_bmjd,
+                    mag_raw,
+                    mag_raw_error,
+                    mag_sub,
+                    mag_sub_error,
+                    zeropoint,
+                    zeropoint_error,
+                    zeropoint_diff,
+                    fwhm,
+                    seeing,
+                    references_detected,
+                    used_for_epsf,
+                    faintest_reference_detected,
+                    pipeline_version
+                ) VALUES (
+                    %(fileid_phot)s,
+                    %(fileid_img)s,
+                    %(fileid_template)s,
+                    %(fileid_diffimg)s,
+                    %(obstime)s,
+                    %(mag_raw)s,
+                    %(mag_raw_error)s,
+                    %(mag_sub)s,
+                    %(mag_sub_error)s,
+                    %(zeropoint)s,
+                    %(zeropoint_error)s,
+                    %(zeropoint_diff)s,
+                    %(fwhm)s,
+                    %(seeing)s,
+                    %(references_detected)s,
+                    %(used_for_epsf)s,
+                    %(faintest_reference_detected)s,
+                    %(pipeline_version)s
+                );""", phot_summary)

             db.cursor.execute("SELECT * FROM flows.photometry_summary WHERE fileid_img=%s;", [fileid_img])
             if db.cursor.fetchone() is None:
                 db.cursor.execute("""INSERT INTO flows.photometry_summary (
-                    fileid_phot,
-                    fileid_img,
-                    fileid_template,
-                    fileid_diffimg,
-                    targetid,
-                    obstime,
-                    photfilter,
-                    mag_raw,
-                    mag_raw_error,
-                    mag_sub,
-                    mag_sub_error,
-                    pipeline_version,
-                    latest_version
-                ) VALUES (
-                    %(fileid_phot)s,
-                    %(fileid_img)s,
-                    %(fileid_template)s,
-                    %(fileid_diffimg)s,
-                    %(targetid)s,
-                    %(obstime)s,
-                    %(photfilter)s,
-                    %(mag_raw)s,
-                    %(mag_raw_error)s,
-                    %(mag_sub)s,
-                    %(mag_sub_error)s,
-                    %(pipeline_version)s,
-                    %(latest_version)s
-                );""", phot_summary)
+                        fileid_phot,
+                        fileid_img,
+                        fileid_template,
+                        fileid_diffimg,
+                        targetid,
+                        obstime,
+                        photfilter,
+                        mag_raw,
+                        mag_raw_error,
+                        mag_sub,
+                        mag_sub_error,
+                        pipeline_version,
+                        latest_version
+                    ) VALUES (
+                        %(fileid_phot)s,
+                        %(fileid_img)s,
+                        %(fileid_template)s,
+                        %(fileid_diffimg)s,
+                        %(targetid)s,
+                        %(obstime)s,
+                        %(photfilter)s,
+                        %(mag_raw)s,
+                        %(mag_raw_error)s,
+                        %(mag_sub)s,
+                        %(mag_sub_error)s,
+                        %(pipeline_version)s,
+                        %(latest_version)s
+                    );""", phot_summary)
             else:
                 db.cursor.execute("""UPDATE flows.photometry_summary SET
-                    fileid_phot=%(fileid_phot)s,
-                    targetid=%(targetid)s,
-                    fileid_template=%(fileid_template)s,
-                    fileid_diffimg=%(fileid_diffimg)s,
-                    obstime=%(obstime)s,
-                    photfilter=%(photfilter)s,
-                    mag_raw=%(mag_raw)s,
-                    mag_raw_error=%(mag_raw_error)s,
-                    mag_sub=%(mag_sub)s,
-                    mag_sub_error=%(mag_sub_error)s,
-                    pipeline_version=%(pipeline_version)s,
-                    latest_version=%(latest_version)s
-                    WHERE fileid_img=%(fileid_img)s;""", phot_summary)
+                        fileid_phot=%(fileid_phot)s,
+                        targetid=%(targetid)s,
+                        fileid_template=%(fileid_template)s,
+                        fileid_diffimg=%(fileid_diffimg)s,
+                        obstime=%(obstime)s,
+                        photfilter=%(photfilter)s,
+                        mag_raw=%(mag_raw)s,
+                        mag_raw_error=%(mag_raw_error)s,
+                        mag_sub=%(mag_sub)s,
+                        mag_sub_error=%(mag_sub_error)s,
+                        pipeline_version=%(pipeline_version)s,
+                        latest_version=%(latest_version)s
+                    WHERE fileid_img=%(fileid_img)s;""", phot_summary)

             # Update the photometry status to done:
             db.cursor.execute(
@@ -682,19 +734,31 @@
             db.conn.commit()

-        except:  # noqa: E722, pragma: no cover
+        except (KeyboardInterrupt, SystemExit):
+            db.conn.rollback()
+            if newpath is not None and os.path.isdir(os.path.dirname(newpath)):
+                shutil.rmtree(os.path.dirname(newpath))
+            logger.warning("Stopped by user or system")
+            return
+
+        except Exception as e:  # noqa: E722, pragma: no cover
             db.conn.rollback()
             if newpath is not None and os.path.isdir(os.path.dirname(newpath)):
                 shutil.rmtree(os.path.dirname(newpath))
-            raise
+            logger.exception("%s: Could not ingest file", fpath)
+            if uploadlogid:
+                msg = str(e) if not hasattr(e, 'message') else str(e.message)
+                db.cursor.execute("UPDATE flows.uploadlog SET status=%s WHERE logid=%s;", [msg, uploadlogid])
+                db.conn.commit()
+
         else:
             # Set file and directory permissions:
             # TODO: Can this not be handled in a more elegant way?
-            os.chmod(os.path.join(rootdir_archive, targetname), 0o2750)
-            os.chmod(os.path.join(rootdir_archive, targetname, f'{fileid_img:05d}'), 0o2750)
-            os.chmod(os.path.join(rootdir_archive, targetname, f'{fileid_img:05d}', f'v{new_version:02d}'), 0o2550)
+            os.chmod(os.path.join(rootdir_archive, targetname), 0o2775)
+            os.chmod(os.path.join(rootdir_archive, targetname, f'{fileid_img:05d}'), 0o2775)
+            os.chmod(os.path.join(rootdir_archive, targetname, f'{fileid_img:05d}', f'v{new_version:02d}'), 0o2555)
             for f in os.listdir(os.path.dirname(newpath)):
-                os.chmod(os.path.join(os.path.dirname(newpath), f), 0o0440)
+                os.chmod(os.path.join(os.path.dirname(newpath), f), 0o0444)

             logger.info("DELETE THE ORIGINAL FILE")
             if os.path.isfile(fpath):
@@ -709,7 +773,7 @@
 def cleanup_inbox():
     """
     Cleanup of inbox directory
     """
-    rootdir_inbox = '/flows/inbox'
+    rootdir_inbox = '/archive/inbox'

     # Just a simple check to begin with:
     if not os.path.isdir(rootdir_inbox):
@@ -727,13 +791,12 @@
     # Delete left-over files in the database tables, that have been removed from disk:
     with AADC_DB() as db:
-        db.cursor.execute("SELECT logid,uploadpath FROM flows.uploadlog WHERE uploadpath IS NOT NULL;")
+        db.cursor.execute("SELECT logid,uploadpath FROM flows.uploadlog WHERE uploadpath IS NOT NULL AND fileid IS NULL AND status!='File deleted';")
         for row in db.cursor.fetchall():
             if not os.path.isfile(os.path.join(rootdir_inbox, row['uploadpath'])):
-                print("MARK AS DELETED IN DATABASE: " + row['uploadpath'])
-                db.cursor.execute("UPDATE flows.uploadlog SET uploadpath=NULL,status='File deleted' WHERE logid=%s;",
-                    [row['logid']])
-                db.conn.commit()
+                print(f"MARK AS DELETED IN DATABASE (logid={row['logid']:d}): " + row['uploadpath'])
+                db.cursor.execute("UPDATE flows.uploadlog SET status='File deleted' WHERE logid=%s;", [row['logid']])
+                db.conn.commit()

 # --------------------------------------------------------------------------------------------------
@@ -760,10 +823,28 @@
     ingest_photometry_from_inbox()
     cleanup_inbox()

+    # Only report errors if we have not already done so within the last day:
+    report_errors = True
+    try:
+        with AADC_DB() as db:
+            db.cursor.execute("SELECT * FROM flows.ingest_reporting WHERE last_ingest_report > TIMEZONE('utc', NOW()) - '1 day'::interval LIMIT 1;")
+            last_ingest_report = db.cursor.fetchone()
+            if last_ingest_report is None:
+                db.cursor.execute("TRUNCATE flows.ingest_reporting;")
+                db.cursor.execute("INSERT INTO flows.ingest_reporting (last_ingest_report) VALUES (TIMEZONE('utc', NOW()));")
+                db.conn.commit()
+                report_errors = True
+            else:
+                report_errors = False
+    except Exception:
+        db.conn.rollback()
+        report_errors = True
+
     # Check the number of errors or warnings issued, and convert these to a return-code:
-    logcounts = counter.counter
-    if logcounts.get('ERROR', 0) > 0 or logcounts.get('CRITICAL', 0) > 0:
-        sys.exit(4)
-    elif logcounts.get('WARNING', 0) > 0:
-        sys.exit(3)
+    if report_errors:
+        logcounts = counter.counter
+        if logcounts.get('ERROR', 0) > 0 or logcounts.get('CRITICAL', 0) > 0:
+            sys.exit(4)
+        elif logcounts.get('WARNING', 0) > 0:
+            sys.exit(3)

     sys.exit(0)
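The ingest changes above read a `dont_ingest_again` column on `flows.uploadlog` and a `flows.ingest_reporting` table, neither of which is created by this patch. A hypothetical one-off migration using the psycopg2 driver already pinned in requirements.txt; the table and column definitions are inferred from the queries above, and the connection parameters are placeholders:

```python
import psycopg2

MIGRATION = """
ALTER TABLE flows.uploadlog
    ADD COLUMN IF NOT EXISTS dont_ingest_again BOOLEAN NOT NULL DEFAULT FALSE;
CREATE TABLE IF NOT EXISTS flows.ingest_reporting (
    last_ingest_report TIMESTAMP WITHOUT TIME ZONE NOT NULL
);
"""

conn = psycopg2.connect(host="localhost", dbname="flows")  # placeholder credentials
try:
    with conn, conn.cursor() as cur:  # commits on success, rolls back on error
        cur.execute(MIGRATION)
finally:
    conn.close()
```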
diff --git a/run_visibility.py b/run_visibility.py
index 05934ab..a3d37b1 100644
--- a/run_visibility.py
+++ b/run_visibility.py
@@ -7,6 +7,9 @@
 import argparse
 from flows.plots import plots_interactive
 import flows
+from flows.target import Target
+import re
+import os.path

 if __name__ == '__main__':
     # Parse command line arguments:
@@ -14,10 +17,21 @@
-    parser.add_argument('-t', '--target', type=str, help='TIC identifier of target.', nargs='?', default=2)
-    parser.add_argument('-s', '--site', type=int, help='TIC identifier of target.', nargs='?', default=None)
-    parser.add_argument('-d', '--date', type=str, help='TIC identifier of target.', nargs='?', default=None)
-    parser.add_argument('-o', '--output', type=str, help='TIC identifier of target.', nargs='?', default=None)
+    parser.add_argument('-t', '--target', type=str, help='Name of target to plot visibility for.', nargs='?', default=2)
+    parser.add_argument('-s', '--site', type=int, help='Site ID to restrict visibility plot to.', nargs='?', default=None)
+    parser.add_argument('-d', '--date', type=str, help='Date (YYYYMMDD) to plot visibility for.', nargs='?', default=None)
+    parser.add_argument('-o', '--output', type=str, help='Base output directory for visibility plots.', nargs='?', default=None)
     args = parser.parse_args()

     if args.output is None:
         plots_interactive()

-    flows.visibility(target=args.target, siteid=args.site, date=args.date, output=args.output)
+    t = Target.from_tname(args.target)
+
+    output = args.output
+    if output is not None:
+        regex_year = re.compile(r'^(\d+)')
+        m = regex_year.match(t.name)
+        year = int(m.group(1))
+        output = os.path.join(output, str(year), t.name)
+        if not os.path.exists(output):
+            os.makedirs(output)
+
+    flows.visibility(target=t, siteid=args.site, date=args.date, output=output)
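The year-prefix regex above assumes target names of the form `2020aatc`; if a name without a leading year slips through, `m.group(1)` raises an opaque `AttributeError`. A standalone variant that fails with a clear message instead (the function name is hypothetical):

```python
import re

def target_year(name: str) -> int:
    """Extract the leading year from a Flows target name such as '2020aatc'."""
    m = re.match(r'^(\d+)', name)
    if m is None:
        raise ValueError(f"cannot parse year from target name: {name!r}")
    return int(m.group(1))

assert target_year("2020aatc") == 2020
```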
diff --git a/tests/input/2020aatc/SN2020aatc_K_20201213_495s.fits.gz b/tests/input/2020aatc/SN2020aatc_K_20201213_495s.fits.gz
deleted file mode 100644
index 0b00ece..0000000
--- a/tests/input/2020aatc/SN2020aatc_K_20201213_495s.fits.gz
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:94a1bdcfbfd75da8c66f7d37271125353953acb16d087982d4241f05f9864c6a
-size 2909171
diff --git a/tests/input/2020lao/59000.96584_h_e_20200531_33_1_1_1_2020lao_LT_gp.fits.gz b/tests/input/2020lao/59000.96584_h_e_20200531_33_1_1_1_2020lao_LT_gp.fits.gz
deleted file mode 100644
index e5293a8..0000000
--- a/tests/input/2020lao/59000.96584_h_e_20200531_33_1_1_1_2020lao_LT_gp.fits.gz
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:bed8ade4c361fcdeb651655d502614feb980e3d68276e4c9cbb39ea50f1fd5bc
-size 5114792
diff --git a/tests/input/2020lao/subtracted/59000.96584_h_e_20200531_33_1_1_1_2020lao_LT_gpdiff.fits.gz b/tests/input/2020lao/subtracted/59000.96584_h_e_20200531_33_1_1_1_2020lao_LT_gpdiff.fits.gz
deleted file mode 100644
index 6695207..0000000
--- a/tests/input/2020lao/subtracted/59000.96584_h_e_20200531_33_1_1_1_2020lao_LT_gpdiff.fits.gz
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:5089e5dfbb00b02dc0790b0f7fecca9a0a39d13579c5699889868c8cf2b21e70
-size 19851243
diff --git a/tests/input/2021wyw/ADP.2021-10-15T11_40_06.553.fits.gz b/tests/input/2021wyw/ADP.2021-10-15T11_40_06.553.fits.gz
deleted file mode 100644
index 1640e1c..0000000
--- a/tests/input/2021wyw/ADP.2021-10-15T11_40_06.553.fits.gz
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:578fdbf2be74097ab3c0c6cdd951062172221b8d23cab0f952352eb5ca678aa7
-size 44996903
diff --git a/tests/test_catalogs.py b/tests/test_catalogs.py
index f309eaa..5a07579 100644
--- a/tests/test_catalogs.py
+++ b/tests/test_catalogs.py
@@ -13,6 +13,7 @@
 import pytest
 from astropy.coordinates import SkyCoord
 from astropy.table import Table
+from tendrils.utils import load_config
 from flows import catalogs

@@ -46,7 +47,7 @@ def test_download_catalog(SETUP_CONFIG, ra: float = 256.727512, dec: float = 30.
     # Check if CasJobs have been configured, and skip the entire test if it isn't.
     # This has to be done like this, to avoid problems when config.ini doesn't exist.
     try:
-        catalogs.configure_casjobs()
+        catalogs.casjobs_configured(load_config())
     except catalogs.CasjobsError:
         pytest.skip("CasJobs not configured")
diff --git a/tests/test_photometry.py b/tests/test_photometry.py
index b04185c..2e0252a 100644
--- a/tests/test_photometry.py
+++ b/tests/test_photometry.py
@@ -52,7 +52,6 @@ def test_photometry_mag_fast(photometry_fast):

 # ------------------ EVEN SLOWER TESTS ------------------------------------------------
-@pytest.mark.slow
 @pytest.fixture(scope='session', autouse=True)
 def photometry_slow_optical(fid=1446):
     result_table, tdir = run_phot(fid)
diff --git a/tests/test_sites.py b/tests/test_sites.py
index e0eba5e..0c967cd 100644
--- a/tests/test_sites.py
+++ b/tests/test_sites.py
@@ -78,7 +78,7 @@ def test_site_visibility(flows_sites):
     with pytest.deprecated_call():
         with plt.ioff():
             plotpaths = visibility(
-                target, date="2023-01-01", output=tempdir
+                target, date="20230101", output=tempdir
             )
     assert not isinstance(plotpaths, plt.Axes)
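A closing note on the `tests/test_photometry.py` hunk: pytest marks applied to fixtures have no effect (recent pytest versions warn about exactly this), which is presumably why `@pytest.mark.slow` was dropped from the fixture above. To keep slow runs deselectable with `-m 'not slow'`, the mark belongs on the consuming tests instead; a minimal illustration with hypothetical names:

```python
import pytest

@pytest.fixture(scope='session')
def slow_resource():
    return 42  # stand-in for an expensive photometry run

@pytest.mark.slow  # mark the test, not the fixture
def test_uses_slow_resource(slow_resource):
    assert slow_resource == 42
```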