Skip to content

Commit

Permalink
add "lost scripts" from kasoc@zion
Browse files Browse the repository at this point in the history
  • Loading branch information
Erik Jensen committed Sep 26, 2024
1 parent 581aa54 commit bb4fcf8
Show file tree
Hide file tree
Showing 5 changed files with 442 additions and 0 deletions.
3 changes: 3 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -147,3 +147,6 @@ tests/output/
typings/

*.config

# Downloaded at run-time in flows_mwmap.py:
flows_mwmap_milkyway.fits
120 changes: 120 additions & 0 deletions fill_photometry_details.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,120 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import os.path
import getpass
import numpy as np
from astropy.table import Table
from astropy.units import Quantity
import sys
if sys.path[0] != os.path.abspath(os.path.join(os.path.dirname(__file__), 'flows')):
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), 'flows')))
from flows.aadc_db import AADC_DB
from tendrils.utils import load_config

if __name__ == '__main__':
    # Backfill script: find flows.photometry_details rows where
    # faintest_reference_detected was stored as NaN, recompute the value from
    # the archived ECSV photometry table on disk, and write the corrected
    # value (or NULL when no finite reference magnitude exists) back to the DB.
    #
    # NOTE: The original one-off INSERT backfill that used to live here
    # (populating flows.photometry_details from scratch) has been removed as
    # dead commented-out code; see the repository history if it is ever needed.
    with AADC_DB() as db:

        # Photometry files are datatype=2; join to details to find NaN entries:
        db.cursor.execute("SELECT fileid,path,targetid FROM flows.files LEFT JOIN flows.photometry_details d ON d.fileid_phot=files.fileid WHERE files.datatype=2 AND d.faintest_reference_detected='NaN'::real;")
        for row in db.cursor.fetchall():

            fileid_phot = row['fileid']
            filepath = os.path.join('/data/flows/archive/archive_photometry/', row['path'])

            tab = Table.read(filepath, format='ascii.ecsv')
            print(len(tab))

            # Reference stars have starid > 0 (starid == 0 is the raw target
            # measurement and starid == -1 the template-subtracted one).
            indx_ref = (tab['starid'] > 0)

            # Faintest detected reference star; an all-NaN result maps to SQL NULL:
            frd = float(np.nanmax(tab[indx_ref]['mag']))
            if np.isnan(frd):
                frd = None

            print(fileid_phot, frd)

            db.cursor.execute("UPDATE flows.photometry_details SET faintest_reference_detected=%s WHERE fileid_phot=%s;", [frd, fileid_phot])

        db.conn.commit()
33 changes: 33 additions & 0 deletions fix_filehash.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
import os.path
import getpass
from flows.aadc_db import AADC_DB
from flows.utilities import get_filehash
from tendrils.utils import load_config


if __name__ == '__main__':
    # One-off maintenance script: point flows.files rows for target 8 at their
    # gzipped FITS files, refreshing the stored path, file hash and file size
    # from what is actually on disk. Rows already ending in '.fits.gz' are
    # skipped; rows whose gzipped file is missing are reported and left alone.
    config = load_config()

    with AADC_DB() as db:

        db.cursor.execute("SELECT * FROM flows.files WHERE targetid=8;")

        for record in db.cursor.fetchall():
            old_path = record['path']
            if old_path.endswith('.fits.gz'):
                continue  # already updated

            p = old_path.replace('.fits', '.fits.gz')
            fpath = os.path.join('/data/flows/archive/archive', p)

            try:
                fsize = os.path.getsize(fpath)
                fhash = get_filehash(fpath)
            except FileNotFoundError:
                print(f'File {fpath} not found')
                continue

            print(p)
            print(fsize)
            print(fhash)

            # Commit per-row so partial progress survives an interruption:
            db.cursor.execute("UPDATE flows.files SET path=%s,filehash=%s,filesize=%s WHERE fileid=%s;",
                              [p, fhash, fsize, record['fileid']])
            db.conn.commit()
159 changes: 159 additions & 0 deletions flows_create_visibility_plots.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,159 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import argparse
import logging
import os
import datetime
import shutil
import glob
import re
import requests
import urllib3
import ssl
import sys
from tqdm import tqdm
if os.path.abspath(os.path.join(os.path.dirname(__file__), 'flows')) not in sys.path:
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), 'flows')))
import flows
from flows.target import Target
from flows.aadc_db import AADC_DB
from tendrils import api

#---------
# https://stackoverflow.com/questions/66689696/urllib3-error-ssl-wrong-signature-type
class SslOldHttpAdapter(requests.adapters.HTTPAdapter):
    """Transport adapter that lowers the OpenSSL security level so servers
    with outdated TLS setups (e.g. skyserver.sdss.org) can be reached.

    https://stackoverflow.com/questions/66689696/urllib3-error-ssl-wrong-signature-type
    """
    def init_poolmanager(self, connections, maxsize, block=False, **pool_kwargs):
        ctx = ssl.create_default_context()
        # Accept older signature types rejected at the default security level:
        ctx.set_ciphers('DEFAULT@SECLEVEL=1')
        # Bugfix: forward the pool-sizing arguments and any extra keyword
        # arguments to PoolManager — the previous override silently dropped
        # connections/maxsize/block.
        self.poolmanager = urllib3.poolmanager.PoolManager(
            num_pools=connections,
            maxsize=maxsize,
            block=block,
            ssl_version=ssl.PROTOCOL_TLS,
            ssl_context=ctx,
            **pool_kwargs)

#---------
if __name__ == '__main__':
    # Maintenance script: reject stale candidates, create visibility plots for
    # current candidates/targets, prune outdated plot files, and download an
    # SDSS thumbnail image for each target field.

    # Parse command line arguments:
    # Bugfix: description previously said "Download ZTF photometry." (copy-paste).
    parser = argparse.ArgumentParser(description='Create visibility plots for FLOWS candidates and targets.')
    parser.add_argument('-d', '--debug', help='Print debug messages.', action='store_true')
    parser.add_argument('-q', '--quiet', help='Only report warnings and errors.', action='store_true')
    parser.add_argument('--fast', action='store_true')
    args = parser.parse_args()

    # Set logging level:
    logging_level = logging.INFO
    if args.quiet:
        logging_level = logging.WARNING
    elif args.debug:
        logging_level = logging.DEBUG

    # Setup logging:
    formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
    console = logging.StreamHandler()
    console.setFormatter(formatter)
    logger = logging.getLogger('flows')
    if not logger.hasHandlers():
        logger.addHandler(console)
    logger.setLevel(logging_level)

    today = datetime.datetime.utcnow()
    tomorrow = datetime.datetime.utcnow() + datetime.timedelta(hours=24)
    yesterday = datetime.datetime.utcnow() - datetime.timedelta(hours=24)

    # Automatically reject candidates that were inserted more than 3 months ago:
    with AADC_DB() as db:
        db.cursor.execute("""UPDATE flows.targets SET target_status='rejected' WHERE
            target_status='candidate'
            AND inserted < (NOW() at time zone 'utc') - '3 months'::interval;""")  # TODO: Change to last_modified!
        n_rejected = db.cursor.rowcount
        db.conn.commit()
        logger.info("%d targets automatically rejected.", n_rejected)

    # Sort the targets by when they were inserted (newest first):
    targets = api.get_targets()
    targets = sorted(targets, key=lambda x: x['inserted'])[::-1]

    # Hide the progress bar unless INFO logging is enabled:
    tqdm_settings = {'disable': None if logger.isEnabledFor(logging.INFO) else True}

    # Start requests session and allow older SSL settings
    # since sdss.org is not that modern:
    ses = requests.Session()
    ses.mount('https://skyserver.sdss.org', SslOldHttpAdapter())

    regex_year = re.compile(r'^(\d+)')
    for tgt in tqdm(targets, **tqdm_settings):

        # Output directories are grouped by the leading digits of the target
        # name (presumably the discovery year — TODO confirm naming scheme):
        m = regex_year.match(tgt['target_name'])
        year = int(m.group(1))
        outdir = os.path.join('/data/flows/archive/candidates', str(year), tgt['target_name'])

        # In fast mode, only process targets inserted within the last 24 hours
        # that do not already have an output directory:
        if args.fast and (tgt['inserted'] < yesterday or os.path.isdir(outdir)):
            continue

        if tgt['target_status'] not in ('candidate', 'target'):
            # Cleanup any leftover directories:
            if os.path.isdir(outdir):
                logger.info("Deleting old directory for %s...", tgt['target_name'])
                shutil.rmtree(outdir)
            continue

        logger.info("Creating plots for %s...", tgt['target_name'])
        os.makedirs(outdir, exist_ok=True)

        tgtobj = Target.from_tid(tgt['targetid'])

        # FIXME: Limiting to only siteid=2 (VLT) right now!
        flows.visibility(target=tgtobj, siteid=2, date=today.strftime('%Y-%m-%d'), output=outdir, overwrite=False)

        # Create plots for tomorrow already:
        flows.visibility(target=tgtobj, siteid=2, date=tomorrow.strftime('%Y-%m-%d'), output=outdir, overwrite=False)

        # Prune the plot directory: delete plots older than ~48 hours and
        # anything not matching the expected visibility-plot naming scheme:
        regex_fname = re.compile(r'^visibility_%s_(\d+)_site\d+\.png$' % tgt['target_name'])
        for f in glob.iglob(os.path.join(outdir, 'visibility_*.png')):
            bname = os.path.basename(f)

            m = regex_fname.match(bname)
            if m:
                file_time = datetime.datetime.strptime(m.group(1), '%Y%m%d')
                file_age = file_time - today  # negative when the plot date is in the past

                if file_age < datetime.timedelta(hours=-48):
                    logger.info("Deleting old file: %s", bname)
                    os.remove(f)

            else:
                logger.warning("Deleting non-matching file: %s", f)
                os.remove(f)

        #------------------------------------------------------------------------------------------

        # Download an SDSS colour thumbnail of the field, if not already present:
        filename = tgt['target_name'] + '_sdss_thumb.jpg'
        filepath = os.path.join(outdir, filename)
        if not os.path.isfile(filepath):
            logger.info("Downloading SDSS thumbnail for %s...", tgt['target_name'])
            params = {
                'ra': tgt['ra'],
                'dec': tgt['decl'],
                'scale': 0.8,  # 1.79224,
                'width': 150,
                'height': 150,
                'opt': 'GI'
            }

            r = ses.get('https://skyserver.sdss.org/dr16/SkyServerWS/ImgCutout/getjpeg',
                params=params, stream=True)
            if r.status_code == 404:  # This is allowed! (field outside SDSS coverage)
                if os.path.isfile(filepath):
                    os.remove(filepath)
            else:
                r.raise_for_status()

                # Save the image to disk.
                # Bugfix: this block previously ran unconditionally, so on a
                # 404 the HTML error body was written to disk as the .jpg.
                with open(filepath, 'wb') as fd:
                    for chunk in r.iter_content(chunk_size=128):
                        fd.write(chunk)
Loading

0 comments on commit bb4fcf8

Please sign in to comment.