Skip to content

Commit

Permalink
changes per PR review
Browse files Browse the repository at this point in the history
  • Loading branch information
pothiers committed Sep 24, 2024
1 parent 4165221 commit 5fc66b6
Show file tree
Hide file tree
Showing 5 changed files with 56 additions and 100 deletions.
82 changes: 23 additions & 59 deletions notebooks_tsqr/NightLog.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,6 @@
"import pandas as pd\n",
"from pprint import pp, pformat\n",
"from urllib.parse import urlencode\n",
"from IPython.display import display, Markdown\n",
"from matplotlib import pyplot as plt\n",
"import os\n",
"from datetime import datetime, date, timedelta\n",
Expand All @@ -58,7 +57,7 @@
"import lsst.ts.logging_and_reporting.almanac as alm\n",
"import lsst.ts.logging_and_reporting.reports as rep \n",
"import lsst.ts.logging_and_reporting.utils as ut\n",
"from lsst.ts.logging_and_reporting.reports import md,mdlist, NightlyLogReport\n",
"from lsst.ts.logging_and_reporting.reports import md,mdlist\n",
"\n",
"try:\n",
" import lsst.ts.logging_and_reporting.version\n",
Expand All @@ -77,7 +76,7 @@
"# Normalize Parameters (both explicit Times Square params, and implicit ones)\n",
"limit = 500 # YAGNI for Auto get more if this isn't enough to get all requested DAYS\n",
"\n",
"date = ut.dos2dt(day_obs)\n",
"date = ut.get_datetime_from_day_obs_str(day_obs)\n",
"# date: is EXCLUSIVE (up to, but not including)\n",
"days = int(number_of_days)\n",
"\n",
Expand Down Expand Up @@ -105,7 +104,7 @@
"\n",
"# The default provided here is for local testing.\n",
"# Under Times Square it is ignored.\n",
"server = os.environ.get('EXTERNAL_INSTANCE_URL', summit) # TODO try with \"usdf\" before push (else \"summit\")"
"server = os.environ.get('EXTERNAL_INSTANCE_URL', usdf) # TODO try with \"usdf\" before push (else \"summit\")"
]
},
{
Expand Down Expand Up @@ -141,8 +140,8 @@
"- Using *Prototype* Logging and Reporting Version: **{lrversion}**\n",
"''')\n",
"\n",
"ul = '\\n- '.join(['',*sad.all_endpoints(server)])\n",
"md(f'This report will attempt to use the following log sources: {ul}')"
"endpoint_urls_str = '\\n- '.join(['',*sad.all_endpoints(server)])\n",
"md(f'This report will attempt to use the following log sources: {endpoint_urls_str}')"
]
},
{
Expand All @@ -167,9 +166,7 @@
{
"cell_type": "markdown",
"id": "10",
"metadata": {
"jp-MarkdownHeadingCollapsed": true
},
"metadata": {},
"source": [
"# Almanac"
]
Expand Down Expand Up @@ -199,16 +196,6 @@
"id": "13",
"metadata": {},
"outputs": [],
"source": [
"min_day_obs, max_day_obs"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "14",
"metadata": {},
"outputs": [],
"source": [
"# Get data from Night Report log. Display nightly Jira BLOCKS.\n",
"nr_adapter = sad.NightReportAdapter(server_url=server,\n",
Expand All @@ -225,7 +212,7 @@
{
"cell_type": "code",
"execution_count": null,
"id": "15",
"id": "14",
"metadata": {},
"outputs": [],
"source": [
Expand All @@ -245,13 +232,13 @@
" md(f'Used: [API Data]({nr_url})')\n",
"\n",
"# Display time log\n",
"nr_rep = NightlyLogReport(min_day_obs=min_day_obs, max_day_obs=max_day_obs)\n",
"nr_rep = rep.NightlyLogReport(min_day_obs=min_day_obs, max_day_obs=max_day_obs)\n",
"nr_rep.time_log_as_markdown(nr_adapter, nr_url)"
]
},
{
"cell_type": "markdown",
"id": "16",
"id": "15",
"metadata": {},
"source": [
"# Exposure Log"
Expand All @@ -260,7 +247,7 @@
{
"cell_type": "code",
"execution_count": null,
"id": "17",
"id": "16",
"metadata": {},
"outputs": [],
"source": [
Expand All @@ -275,24 +262,19 @@
"exposure_url = status['endpoint_url']\n",
"rep.adapter_overview(exposure_adapter, status, limit)\n",
"\n",
"table = exposure_adapter.day_table('date_added', dayobs_field='day_obs')\n",
"mdlist(table)"
"# Display time log\n",
"exposure_rep = rep.ExposurelogReport(min_day_obs=min_day_obs, max_day_obs=max_day_obs)\n",
"exposure_rep.time_log_as_markdown(exposure_adapter, exposure_url)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "18",
"metadata": {
"jupyter": {
"source_hidden": true
}
},
"id": "17",
"metadata": {},
"outputs": [],
"source": [
"# Display Observation gaps\n",
"if usdf == os.environ.get('EXTERNAL_INSTANCE_URL'):\n",
" md(f\"**Warning:** The `/exposures/` endpoint is not yet functional on SERVER=usdf.\", color='red')\n",
"gaps = exposure_adapter.get_observation_gaps()\n",
"if gaps:\n",
" md(f'### Date vs Observation Gap (minutes) for all Instruments')\n",
Expand All @@ -307,7 +289,7 @@
},
{
"cell_type": "markdown",
"id": "19",
"id": "18",
"metadata": {},
"source": [
"# Narrative Log\n"
Expand All @@ -316,17 +298,7 @@
{
"cell_type": "code",
"execution_count": null,
"id": "20",
"metadata": {},
"outputs": [],
"source": [
"min_day_obs, max_day_obs"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "21",
"id": "19",
"metadata": {},
"outputs": [],
"source": [
Expand All @@ -341,21 +313,13 @@
"narrative_url = status['endpoint_url']\n",
"rep.adapter_overview(narrative_adapter, status, limit)\n",
"\n",
"if narrative_adapter.records:\n",
" md('Warning: Some text of Narrative log message may confuse markdown rendering.',\n",
" color='mediumblue')\n",
" table = narrative_adapter.day_table('date_added')\n",
" #print(tabstr)\n",
" #mdlist(table, color=\"darkblue\")\n",
" mdlist(table)\n",
"else:\n",
" md(f'No Narrative Log records found.', color='lightblue')\n",
" md(f'Used [API Data]({narrative_url})')"
"narrrative_rep = rep.NarrativelogReport(min_day_obs=min_day_obs, max_day_obs=max_day_obs)\n",
"narrrative_rep.time_log_as_markdown(narrative_adapter, narrative_url)"
]
},
{
"cell_type": "markdown",
"id": "22",
"id": "20",
"metadata": {},
"source": [
"# Developer Only Section"
Expand All @@ -364,7 +328,7 @@
{
"cell_type": "code",
"execution_count": null,
"id": "23",
"id": "21",
"metadata": {},
"outputs": [],
"source": [
Expand All @@ -377,7 +341,7 @@
},
{
"cell_type": "markdown",
"id": "24",
"id": "22",
"metadata": {},
"source": [
"# Finale"
Expand All @@ -386,7 +350,7 @@
{
"cell_type": "code",
"execution_count": null,
"id": "25",
"id": "23",
"metadata": {},
"outputs": [],
"source": [
Expand Down
12 changes: 4 additions & 8 deletions python/lsst/ts/logging_and_reporting/exceptions.py
Original file line number Diff line number Diff line change
Expand Up @@ -69,14 +69,10 @@ def to_dict(self):
'msg': 'Input should be a valid integer, unable to parse string as an integer',
'input': '2024-09-21'}]}

class UnknownLogrep(BaseLogrepException):
"""Unknown LogRep error. If this is ever raised
create and use a new BaseLogrepException exception that is more
specific."""
error_code = 'UNKLR'

class BadStatus(BaseLogrepException):
"""Unknown LogRep error. If this is ever raised
create and use a new BaseLogrepException exception that is more
specific."""
"""Non-200 HTTP status from API endpoint. Typically
this will occur when a URL query string parameter is passed a value with
a bad format. It may also be that the Service is broken.
"""
error_code = 'BADQSTR'
13 changes: 6 additions & 7 deletions python/lsst/ts/logging_and_reporting/reports.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,13 +31,12 @@
from IPython.display import display, Markdown
import pandas as pd
# Local Packages
from lsst.ts.logging_and_reporting.utils import datetime_to_dayobs
import lsst.ts.logging_and_reporting.almanac as alm

def md(markdown_str, color=None):
# see https://www.w3schools.com/colors/colors_names.asp
if color:
display(Markdown(f"### <font color='{color}'>{markdown_str}</font>"))
display(Markdown(f"<font color='{color}'>{markdown_str}</font>"))
else:
display(Markdown(markdown_str))

Expand All @@ -58,14 +57,14 @@ def dict_to_md(in_dict):
return md_list

def adapter_overview(adapter, status, limit):
cnt = status["number_of_records"]
count = status["number_of_records"]
error = status["error"]
more = '(There may be more.)' if cnt >= limit else ''
result = error if error else f'Got {cnt} records. '
mdlist([f'## Overview for Service: `{adapter.service}` [{cnt}]',
more = '(There may be more.)' if count >= limit else ''
result = error if error else f'Got {count} records. '
mdlist([f'## Overview for Service: `{adapter.service}` [{count}]',
f'- Endpoint: {status["endpoint_url"]}',
f'- {result} {more}',
])
print(f'- {result} {more}')


# TODO move all instances of "row_header", "row_str_func" from source_adapters to here.
Expand Down
37 changes: 15 additions & 22 deletions python/lsst/ts/logging_and_reporting/source_adapters.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,12 +52,11 @@ def all_endpoints(server):
return list(endpoints)


def validate_response(response, url):
def validate_response(response, endpoint_url):
if response.status_code == 200:
return True
else:
# TODO Format for User
msg = f'Error: {response.json()} {url=}'
msg = f'Error: {response.json()} {endpoint_url=} {response.reason}'
raise ex.BadStatus(msg)

class SourceAdapter(ABC):
Expand All @@ -74,16 +73,16 @@ def __init__(self, *,
read_timeout=2, # seconds
):
if min_day_obs is None: # Inclusive
min_day_obs = ut.datetime_to_dayobs(
min_day_obs = ut.datetime_to_day_obs(
datetime.today() - timedelta(days=1))
if max_day_obs is None: # Exclusive
max_day_obs = ut.datetime_to_dayobs(
max_day_obs = ut.datetime_to_day_obs(
datetime.today() + timedelta(days=1))
self.server = server_url
self.min_day_obs = min_day_obs
self.max_day_obs = max_day_obs
self.min_date = ut.dos2dt(min_day_obs)
self.max_date = ut.dos2dt(max_day_obs)
self.min_date = ut.get_datetime_from_day_obs_str(min_day_obs)
self.max_date = ut.get_datetime_from_day_obs_str(max_day_obs)
self.limit = limit
self.offset = offset
self.c_timeout = min(MAX_CONNECT_TIMEOUT,
Expand Down Expand Up @@ -125,19 +124,12 @@ def day_table(self, datetime_field,
row_str_func=None,
zero_message=False,
):
#! def date_time(rec): TODO remove
#! if dayobs_field:
#! dt = datetime.strptime(str(rec[dayobs_field]), '%Y%m%d')
#! else:
#! dt = datetime.fromisoformat(rec[datetime_field])
#! return dt.replace(microsecond=0)

def obs_night(rec):
if 'day_obs' in rec:
return ut.day_obs_str(rec['day_obs']) # -> # "YYYY-MM-DD"
else:
dt = datetime.fromisoformat(rec[datetime_field])
return ut.datetime_to_dayobs(dt)
return ut.datetime_to_day_obs(dt)

def obs_date(rec):
dt = datetime.fromisoformat(rec[datetime_field])
Expand Down Expand Up @@ -272,13 +264,13 @@ def get_reports(self,
url = f'{self.server}/{self.service}/reports?{qstr}'
error = None
try:
r = requests.get(url, timeout=self.timeout)
validate_response(r, url)
recs = r.json()
response = requests.get(url, timeout=self.timeout)
validate_response(response, url)
recs = response.json()
recs.sort(key=lambda r: r['day_obs'])
except Exception as err:
recs = []
error = str(err)
error = f'{response.text=} Exception={err}'

self.keep_fields(recs, self.outfields)
self.records = recs
Expand All @@ -295,7 +287,6 @@ def nightly_tickets(self, recs):
ticket_url = r['confluence_url']
if ticket_url:
tickets[r['day_obs']].add(ticket_url)
#!return {k:list(v) for k,v in tickets.items()}
return {dayobs:list(urls) for dayobs,urls in tickets.items()}


Expand Down Expand Up @@ -348,10 +339,12 @@ def get_messages(self,
qparams['message_text'] = message_text
if self.min_day_obs:
qparams['min_date_added'] = datetime.combine(
self.min_date, time()).isoformat()
self.min_date, time()
).isoformat()
if self.max_day_obs:
qparams['max_date_added'] = datetime.combine(
self.max_date, time()).isoformat()
self.max_date, time()
).isoformat()
if self.limit:
qparams['limit'] = self.limit

Expand Down
Loading

0 comments on commit 5fc66b6

Please sign in to comment.