Merge pull request #289 from sot/no-sybase
Update mica to use ska_dbi sqsh
jeanconn authored Feb 13, 2024
2 parents 45efdbd + 748204c commit cbaccf4
Show file tree
Hide file tree
Showing 12 changed files with 147 additions and 97 deletions.
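The pattern of the change is visible across all four files shown here: `Ska.DBI.DBI` becomes `ska_dbi.DBI` for local SQLite access to `archfiles.db3`, while queries against the Sybase servers (`sqlsao`) move to the `ska_dbi.sqsh.Sqsh` context manager, presumably a wrapper around the `sqsh` command-line client given the module name and the `no-sybase` branch. A minimal sketch of the two patterns, with connection values taken from this diff and an illustrative obsid:

```python
import ska_dbi
from ska_dbi.sqsh import Sqsh

# SQLite: ska_dbi.DBI is a drop-in rename for Ska.DBI.DBI
with ska_dbi.DBI(dbi="sqlite", server="archfiles.db3") as db:
    files = db.fetchall("select * from archfiles")

# Sybase: apstat queries now go through the Sqsh context manager,
# built from the same keyword-style connection dicts as before
apstat_db = dict(dbi="sybase", server="sqlsao", database="axafapstat")
with Sqsh(**apstat_db) as db:
    obis = db.fetchall("select distinct obi from obidet_0_5 where obsid = 1")
```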
mica/archive/aca_l0.py: 22 changes (11 additions, 11 deletions)
@@ -16,7 +16,7 @@
from itertools import count
from pathlib import Path

-import Ska.DBI
+import ska_dbi
import Ska.arc5gl
from Chandra.Time import DateTime
import Ska.File
@@ -411,7 +411,7 @@ def _get_file_records(start, stop=None, slots=None,
'AND imgsize in (%s) '
'order by filetime asc '
% (tstart, tstart_pad, tstop, tstart, slot_str, imgsize_str))
-with Ska.DBI.DBI(**db) as db:
+with ska_dbi.DBI(**db) as db:
files = db.fetchall(db_query)
return files

@@ -456,7 +456,7 @@ def __init__(self,
# db_sql = os.path.join(os.environ['SKA_DATA'],
# 'mica', sql_def)
# db_init_cmds = file(db_sql).read()
-# db = Ska.DBI.DBI(dbi='sqlite', server=db_file,
+# db = ska_dbi.DBI(dbi='sqlite', server=db_file,
# autocommit=False)
# db.execute(db_init_cmds, commit=True)
# year_dirs = sorted(glob(
@@ -490,7 +490,7 @@ def _get_missing_archive_files(self, start, only_new=False):
missing = []
# for the entries after the start date, see if we have the
# file or a later version
-with Ska.DBI.DBI(**self.db) as db:
+with ska_dbi.DBI(**self.db) as db:
for file, idx in zip(ingested_files[-backcnt:], count(0)):
filename = file['filename']
db_match = db.fetchall(
@@ -579,7 +579,7 @@ def _read_archfile(self, i, f, archfiles):
:param i: index of file f within list of files archfiles
:param f: filename
:param archfiles: list of filenames for this batch
-:param db: database handle for file lookup database (Ska.DBI handle)
+:param db: database handle for file lookup database (ska_dbi handle)
:returns: info for a file.
:rtype: dictionary
@@ -588,7 +588,7 @@ def _read_archfile(self, i, f, archfiles):
# Check if filename is already in file lookup table
# If so then delete temporary file and abort further processing.
filename = os.path.basename(f)
-with Ska.DBI.DBI(**self.db) as db:
+with ska_dbi.DBI(**self.db) as db:
if db.fetchall('SELECT filename FROM archfiles WHERE filename=?',
(filename,)):
logger.debug(
@@ -622,7 +622,7 @@ def _read_archfile(self, i, f, archfiles):
archfiles_row['rows'] = len(hdu.data)
hdus.close()

-with Ska.DBI.DBI(**self.db) as db:
+with ska_dbi.DBI(**self.db) as db:
# remove old versions of this file
oldmatches = db.fetchall(
"""SELECT * from archfiles
@@ -666,7 +666,7 @@ def _read_archfile(self, i, f, archfiles):
return archfiles_row

def _arch_remove(self, defunct_matches):
-with Ska.DBI.DBI(**self.db) as db:
+with ska_dbi.DBI(**self.db) as db:
for file_record in defunct_matches:
query = ("""delete from archfiles
WHERE filetime = %(filetime)d
@@ -769,7 +769,7 @@ def _insert_files(self, files):
arch_info = self._read_archfile(i, f, files)
if arch_info:
self._move_archive_files([f])
-with Ska.DBI.DBI(**self.db) as db:
+with ska_dbi.DBI(**self.db) as db:
db.insert(arch_info, 'archfiles')
db.commit()
count_inserted += 1
@@ -793,14 +793,14 @@ def update(self):
% self.sql_def)
db_sql = Path(__file__).parent / self.sql_def
db_init_cmds = open(db_sql).read()
-with Ska.DBI.DBI(**self.db) as db:
+with ska_dbi.DBI(**self.db) as db:
db.execute(db_init_cmds, commit=True)
if self.start:
datestart = DateTime(self.start)
else:
# Get datestart as the most-recent file time from archfiles table
# will need min-of-max-slot-datestart
-with Ska.DBI.DBI(**self.db) as db:
+with ska_dbi.DBI(**self.db) as db:
last_time = min([db.fetchone(
"select max(filetime) from archfiles where slot = %d"
% s)['max(filetime)'] for s in range(0, 8)])
mica/archive/asp_l1_proc.py: 12 changes (7 additions, 5 deletions)
@@ -10,7 +10,8 @@
import gzip
from pathlib import Path

-import Ska.DBI
+import ska_dbi
+from ska_dbi.sqsh import Sqsh
from Ska.File import get_globfiles

from mica.common import MICA_ARCHIVE
@@ -42,11 +43,11 @@ def update(obsids, config=None):
config['sql_def']))
db_sql = Path(__file__).parent / config['sql_def']
db_init_cmds = open(db_sql).read()
-proc_db = Ska.DBI.DBI(dbi='sqlite', server=proc_db_file)
+proc_db = ska_dbi.DBI(dbi='sqlite', server=proc_db_file)
proc_db.execute(db_init_cmds)
else:
-proc_db = Ska.DBI.DBI(dbi='sqlite', server=proc_db_file)
-archdb = Ska.DBI.DBI(dbi='sqlite',
+proc_db = ska_dbi.DBI(dbi='sqlite', server=proc_db_file)
+archdb = ska_dbi.DBI(dbi='sqlite',
server=os.path.join(config['data_root'],
'archfiles.db3'))
for obs in obsids:
@@ -70,7 +71,8 @@
hdus = fits.open(sol)
obi = hdus[1].header['OBI_NUM']
revision = hdus[1].header['REVISION']
-with Ska.DBI.DBI(**apstat_db) as db:
+
+with Sqsh(**apstat_db) as db:
aspect_1 = db.fetchall("""SELECT * FROM aspect_1
WHERE obsid = {obsid}
AND obi = {obi}
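Apart from the module rename, the SQLite bootstrap in `update()` above is unchanged: on first run it reads a SQL schema file shipped next to the module and executes it against a fresh database. A condensed sketch of that pattern, assuming illustrative file names (the real code takes them from `config['sql_def']` and `proc_db_file`):

```python
from pathlib import Path

import ska_dbi

proc_db_file = "/tmp/asp1_proc.db3"   # illustrative database path
sql_def = "asp_l1_proc_def.sql"       # hypothetical schema file name

# Read the schema definition shipped alongside the module and
# run it once to create the tables in a new sqlite database
db_init_cmds = (Path(__file__).parent / sql_def).read_text()
proc_db = ska_dbi.DBI(dbi="sqlite", server=proc_db_file)
proc_db.execute(db_init_cmds)
```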
mica/archive/obsid_archive.py: 29 changes (14 additions, 15 deletions)
@@ -14,7 +14,8 @@
from pathlib import Path

import Ska.arc5gl
-import Ska.DBI
+import ska_dbi
+from ska_dbi.sqsh import Sqsh
from Chandra.Time import DateTime
import Ska.File
from astropy.table import Table
@@ -131,8 +132,6 @@ def set_env(self):
self._arc5 = Ska.arc5gl.Arc5gl()
self._apstat = dict(dbi='sybase', server='sqlsao',
database='axafapstat')
-self._aca_db = dict(dbi='sybase', server='sybase',
-user='aca_read')
config = self.config
db_file = os.path.join(os.path.abspath(config['data_root']),
'archfiles.db3')
@@ -143,7 +142,7 @@
% config['sql_def'])
db_sql = Path(__file__).parent / config['sql_def']
db_init_cmds = open(db_sql).read()
-with Ska.DBI.DBI(dbi='sqlite', server=db_file,
+with ska_dbi.DBI(dbi='sqlite', server=db_file,
autocommit=False) as db:
db.execute(db_init_cmds, commit=True)
db = dict(dbi='sqlite', server=db_file,
@@ -235,7 +234,7 @@ def _get_file_records(self, obsid=None, start=None, stop=None,
else:
db_query += 'AND revision = %d ' % revision
db_query += "order by tstart"
-with Ska.DBI.DBI(**self._archfiles_db) as db:
+with ska_dbi.DBI(**self._archfiles_db) as db:
files = db.fetchall(db_query)
# For the special case of "revision = last" without obsid, filter the results on a per-file
# basis by obsid (could be multiple obsids in the date range)
@@ -339,7 +338,7 @@ def get_ver_num(self, obsid, version='default'):
arc5.sendline("cd %s" % tempdir)
arc5.sendline("obsid=%d" % obsid)

-with Ska.DBI.DBI(**apstat) as db:
+with Sqsh(**apstat) as db:
obis = db.fetchall(
"select distinct obi from obidet_0_5 where obsid = %d" % obsid)
if len(obis) > 1:
@@ -468,7 +467,7 @@ def get_arch(self, obsid, version='last'):
logger.info("retrieving data for %d in %s" % (obsid, tempdir))
arc5.sendline("obsid=%d" % obsid)
# if multi-obi, limit to just the first obi
-with Ska.DBI.DBI(**apstat) as db:
+with Sqsh(**apstat) as db:
obis = db.fetchall(
"select distinct obi from obidet_0_5 where obsid = %d" % obsid)
if len(obis) > 1:
@@ -487,7 +486,7 @@ def update_link(self, obsid):
archfiles = glob(os.path.join(tempdir, "*"))
if not archfiles:
raise ValueError("Retrieved no files")
-with Ska.DBI.DBI(**self._archfiles_db) as db:
+with ska_dbi.DBI(**self._archfiles_db) as db:
existing = db.fetchall(
"select * from archfiles where obsid = %d and revision = '%s'"
% (obsid, version))
@@ -544,7 +543,7 @@ def update_link(self, obsid):
obs_ln_last = os.path.join(archive_dir, chunk_dir,
"%05d_last" % obsid)

-with Ska.DBI.DBI(**self._archfiles_db) as db:
+with ska_dbi.DBI(**self._archfiles_db) as db:
if default_ver is not None:
def_ver_dir = os.path.join(archive_dir, chunk_dir,
'%05d_v%02d' % (obsid, default_ver))
@@ -689,7 +688,7 @@ def update(self):
order by %(apstat_id)s"""
% query_vars)
logger.debug(apstat_query)
-with Ska.DBI.DBI(**apstat) as db:
+with Sqsh(**apstat) as db:
todo = db.fetchall(apstat_query)
for obs in todo:
logger.info("running get_arch for obsid %d run on %s"
@@ -738,7 +737,7 @@ def update(self):
prov_data = self.get_todo_from_links(archive_dir)
for obs in prov_data:
# check again for multi-obis and limit to first one
-with Ska.DBI.DBI(**apstat) as db:
+with ska_dbi.DBI(**apstat) as db:
obis = db.fetchall(
"select distinct obi from obidet_0_5 where obsid = %d"
% obs['obsid'])
@@ -755,7 +754,7 @@
and revision = %(revision)d"""
% query_vars)
logger.debug(apstat_query)
-with Ska.DBI.DBI(**apstat) as db:
+with Sqsh(**apstat) as db:
current_status = db.fetchall(apstat_query)
if len(current_status) == 0:
logger.warning(
@@ -770,7 +769,7 @@
# a query to get the quality from the max science_2 data that
# used this aspect_solution. Ugh.
if config['apstat_table'] == 'aspect_1':
-with Ska.DBI.DBI(**apstat) as db:
+with Sqsh(**apstat) as db:
science_qual = db.fetchall(
"""select quality from science_2 where science_2_id in (
select science_2_id from science_2_obi where science_1_id in (
@@ -787,10 +786,10 @@
else:
# if there are no science_2 entries at all for the obsid
# see if it is discarded
-with Ska.DBI.DBI(dbi='sybase', server='sqlsao', database='axafocat') as db:
+with Sqsh(dbi='sybase', server='sqlsao', database='axafocat') as db:
target_status = db.fetchone(
"select status from target where obsid = {}".format(obs['obsid']))
-if target_status['status'] == 'discarded':
+if target_status and target_status['status'] == 'discarded':
logger.info("Skipping {}, obsid 'discarded'".format(
obs['obsid']))
continue
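The last hunk above also hardens the discarded-obsid check: `fetchone` returns `None` when no row matches, and the old code would have raised a `TypeError` on `None['status']`. A sketch of the guarded pattern, with `obsid` and `logger` standing in for the surrounding code:

```python
import logging

from ska_dbi.sqsh import Sqsh

logger = logging.getLogger(__name__)
obsid = 19039  # illustrative obsid

with Sqsh(dbi="sybase", server="sqlsao", database="axafocat") as db:
    target_status = db.fetchone(
        "select status from target where obsid = {}".format(obsid))

# fetchone may return None when no target row exists for the obsid;
# check truthiness before subscripting
if target_status and target_status["status"] == "discarded":
    logger.info("Skipping {}, obsid 'discarded'".format(obsid))
```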
mica/archive/tests/test_asp_l1.py: 86 changes (69 additions, 17 deletions)
@@ -1,14 +1,20 @@
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import os
+from pathlib import Path
import tempfile

import numpy as np
+import pytest
+import ska_dbi
+from astropy.table import Table
+from kadi import events
from Quaternion import Quat, normalize
from Ska.engarchive import fetch
-from kadi import events
-import pytest
+from testr import test_helper

-from .. import asp_l1
+from .. import asp_l1, obsid_archive

-HAS_L1_ARCHIVE = os.path.exists(asp_l1.CONFIG['data_root'])
+HAS_L1_ARCHIVE = Path(asp_l1.CONFIG["data_root"]).exists()

def compare_obc_and_asol(atts, times, recs, ptol=2, ytol=2, rtol=65):
"""
@@ -24,10 +30,11 @@ def compare_obc_and_asol(atts, times, recs, ptol=2, ytol=2, rtol=65):
:param rtol: dq quaternion roll tolerance in arcsecs
"""
for ai in recs:
-telem = fetch.Msidset(['aoattqt*'], ai['TSTART'], ai['TSTOP'])
-obc_atts = np.vstack([telem['aoattqt{}'.format(idx)].vals
-for idx in [1, 2, 3, 4]]).transpose()
-obc_times = telem['aoattqt1'].times
+telem = fetch.Msidset(["aoattqt*"], ai["TSTART"], ai["TSTOP"])
+obc_atts = np.vstack(
+[telem["aoattqt{}".format(idx)].vals for idx in [1, 2, 3, 4]]
+).transpose()
+obc_times = telem["aoattqt1"].times
# Test that, at the obc times, the onboard solution and the ground solution
# are reasonably close
idxs = np.searchsorted(times[:-1], obc_times)
@@ -47,16 +54,17 @@ def compare_obc_and_asol(atts, times, recs, ptol=2, ytol=2, rtol=65):
assert np.all(np.abs(drs) < rtol)


-@pytest.mark.skipif('not HAS_L1_ARCHIVE', reason='Test requires L1 archive')
+@pytest.mark.skipif("not HAS_L1_ARCHIVE", reason="Test requires L1 archive")
@pytest.mark.parametrize("obsid", [14333, 15175, 5438, 2121])
def test_get_atts_obsid(obsid):
atts, times, recs = asp_l1.get_atts(obsid=obsid)
compare_obc_and_asol(atts, times, recs)

-@pytest.mark.skipif('not HAS_L1_ARCHIVE', reason='Test requires L1 archive')
+
+@pytest.mark.skipif("not HAS_L1_ARCHIVE", reason="Test requires L1 archive")
def test_get_atts_time():
-start = '2014:001:00:00:00.000'
-stop = '2014:005:00:00:00.000'
+start = "2014:001:00:00:00.000"
+stop = "2014:005:00:00:00.000"
atts, times, recs = asp_l1.get_atts(start=start, stop=stop)
assert len(atts) == len(times)
compare_obc_and_asol(atts, times, recs)
@@ -66,14 +74,58 @@ def test_get_atts_time():
continue
# check that more than 90% of the kalman interval is in times fetched from get_atts
ok = (times < dwell.tstop) & (times > dwell.tstart)
-assert (times[ok][-1] - times[ok][0]) > dwell.dur * .90
+assert (times[ok][-1] - times[ok][0]) > dwell.dur * 0.90
# also assert that the number of ~.25sec samples works out
-assert (len(times[ok]) * .25625) > dwell.dur * .90
+assert (len(times[ok]) * 0.25625) > dwell.dur * 0.90


+@pytest.mark.skipif("not test_helper.on_head_network()", reason="Not on HEAD network")
+def test_update_l1_archive(tmp_path):
+
+config = asp_l1.CONFIG.copy()
+config["data_root"] = tmp_path / "asp1"
+config["temp_root"] = tmp_path / "temp"
+config["bad_obsids"] = tmp_path / "asp1" / "asp_l1_bad_obsids.dat"
+config["firstrun"] = True
+config["rebuild"] = True
+config["obsid"] = 1
+config["version"] = 4
+archive = obsid_archive.ObsArchive(config)
+obsids = archive.update()
+
+with ska_dbi.DBI(
+dbi="sqlite", server=config["data_root"] / "archfiles.db3"
+) as db:
+dat = Table(db.fetchall("select * from archfiles"))
+dat.sort("filename")
+assert dat[
+"filename",
+"filetime",
+"ascdsver",
+"caldbver",
+"content",
+"revision",
+"obsid",
+].pformat_all() == (
+[
+" filename                          filetime ascdsver caldbver content    revision obsid",
+"--------------------------------- -------- -------- -------- ---------- -------- -----",
+" pcadf059904356N004_acal1.fits.gz 59904356  8.3.2.1    4.3.0     ACACAL        4     1",
+" pcadf059904356N004_acen1.fits.gz 59904356  8.3.2.1    4.3.0    ACACENT        4     1",
+"pcadf059904356N004_aqual1.fits.gz 59904356  8.3.2.1    4.3.0    ASPQUAL        4     1",
+" pcadf059904356N004_asol1.fits.gz 59904356  8.3.2.1    4.3.0     ASPSOL        4     1",
+" pcadf059904356N004_bpix1.fits.gz 59904356  8.3.2.1    4.3.0 ACA_BADPIX        4     1",
+"pcadf059904356N004_fidpr1.fits.gz 59904356  8.3.2.1    4.3.0   FIDPROPS        4     1",
+" pcadf059904356N004_gcal1.fits.gz 59904356  8.3.2.1    4.3.0    GYROCAL        4     1",
+" pcadf059904356N004_gspr1.fits.gz 59904356  8.3.2.1    4.3.0    GSPROPS        4     1",
+]
+)


-@pytest.mark.skipif('not HAS_L1_ARCHIVE', reason='Test requires L1 archive')
+@pytest.mark.skipif("not HAS_L1_ARCHIVE", reason="Test requires L1 archive")
def test_get_atts_filter():
# Obsid 19039 has a momentum dump that shows up in asp_sol_status
atts, times, recs = asp_l1.get_atts(obsid=19039)
uf_atts, uf_times, uf_recs = asp_l1.get_atts(obsid=19039, filter=False)
# Confirm that approximately 212 seconds are filtered
-assert np.abs((len(uf_atts) - len(atts)) * .25625 - 212.2) < 5
+assert np.abs((len(uf_atts) - len(atts)) * 0.25625 - 212.2) < 5