
Commit

WIP
pothiers committed Sep 17, 2024
1 parent 28d3a81 commit 83a61e0
Showing 5 changed files with 234 additions and 80 deletions.
154 changes: 87 additions & 67 deletions notebooks_tsqr/NightLog.ipynb
@@ -3,7 +3,9 @@
{
"cell_type": "markdown",
"id": "0",
"metadata": {},
"metadata": {
"jp-MarkdownHeadingCollapsed": true
},
"source": [
"# Initialization"
]
@@ -28,7 +30,7 @@
"#!day_obs = 'TODAY' # TODO Change to 'TODAY' to test with default before push \n",
"\n",
"# Total number of days of data to display (ending on day_obs)\n",
"number_of_days = '12' # TODO Change to '1' to test with default before push "
"number_of_days = '2' # TODO Change to '1' to test with default before push "
]
},
{
@@ -111,7 +113,8 @@
" !pip install --upgrade git+https://github.com/lsst-ts/ts_logging_and_reporting.git@prototype >/dev/null\n",
"import lsst.ts.logging_and_reporting.source_adapters as sad\n",
"import lsst.ts.logging_and_reporting.almanac as alm\n",
"from lsst.ts.logging_and_reporting.reports import md,mdlist,dict_to_md"
"import lsst.ts.logging_and_reporting.reports as rep \n",
"from lsst.ts.logging_and_reporting.reports import md,mdlist"
]
},
{
@@ -131,8 +134,7 @@
" from lsst_efd_client import EfdClient\n",
" enable_efd = True\n",
"except:\n",
" enable_efd = False\n",
"\n"
" enable_efd = False"
]
},
{
@@ -144,12 +146,28 @@
]
},
{
"cell_type": "code",
"execution_count": null,
"cell_type": "markdown",
"id": "8",
"metadata": {},
"source": [
"The only environment that has everything needed for this page is\n",
"https://summit-lsp.lsst.codes\n",
"\n",
"However, Times Square **does not** run on the Summit. It **does** run on USDF-dev. USDF doesn't fully support all the data sources we need so some functionality is currently missing on this page."
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "9",
"metadata": {
"jupyter": {
"source_hidden": true
}
},
"outputs": [],
"source": [
"# Display overview of Report context \n",
"md(f'''\n",
"Report for **{date}** covering the previous **{days}** observing night(s).\n",
"- Run on logs from **{server}/**\n",
@@ -160,7 +178,7 @@
},
{
"cell_type": "markdown",
"id": "9",
"id": "10",
"metadata": {},
"source": [
"# Almanac"
@@ -169,17 +187,21 @@
{
"cell_type": "code",
"execution_count": null,
"id": "10",
"metadata": {},
"id": "11",
"metadata": {
"jupyter": {
"source_hidden": true
}
},
"outputs": [],
"source": [
"a = alm.Almanac()\n",
"pd.DataFrame(a.as_dict).T # TODO get rid of bogus header"
"# Display various almanac values (for moon, sun)\n",
"rep.AlmanacReport().almanac_as_dataframe()"
]
},
{
"cell_type": "markdown",
"id": "11",
"id": "12",
"metadata": {},
"source": [
"# Nightly Report"
@@ -188,10 +210,15 @@
{
"cell_type": "code",
"execution_count": null,
"id": "12",
"metadata": {},
"id": "13",
"metadata": {
"jupyter": {
"source_hidden": true
}
},
"outputs": [],
"source": [
"# Get data from Night Report log. Display nightly Jira BLOCKS.\n",
"nr_adapter = sad.NightReportAdapter(server_url=server)\n",
"nr_url = nr_adapter.source_url\n",
"try:\n",
@@ -203,26 +230,11 @@
"except Exception as err:\n",
" nr_recs = []\n",
" msg = f'ERROR getting records from {nr_url=}: {err=}'\n",
" raise Exception(msg)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "13",
"metadata": {},
"outputs": [],
"source": [
"print(f'Retrieved {len(nr_recs)} records from {nr_url}')"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "14",
"metadata": {},
"outputs": [],
"source": [
" raise Exception(msg)\n",
"\n",
"# print(f'Retrieved {len(nr_recs)} records from {nr_url}')\n",
"\n",
"# Display Jira BLOCKS\n",
"front = 'https://rubinobs.atlassian.net/projects/BLOCK?selectedItem=com.atlassian.plugins.atlassian-connect-plugin:com.kanoah.test-manager__main-project-page#!/'\n",
"tickets = nr_adapter.nightly_tickets(nr_recs)\n",
"\n",
@@ -241,7 +253,7 @@
},
{
"cell_type": "markdown",
"id": "15",
"id": "14",
"metadata": {},
"source": [
"# Exposure Log"
@@ -250,10 +262,15 @@
{
"cell_type": "code",
"execution_count": null,
"id": "16",
"metadata": {},
"id": "15",
"metadata": {
"jupyter": {
"source_hidden": true
}
},
"outputs": [],
"source": [
"# Get data from Exposure log. Display time log.\n",
"exposure_adapter = sad.ExposurelogAdapter(server_url=server)\n",
"exposure_url = exposure_adapter.source_url\n",
"try:\n",
@@ -265,16 +282,8 @@
"except Exception as err:\n",
" exposure_recs = []\n",
" msg = f'ERROR getting records from {url=}: {err=}'\n",
" raise Exception(msg)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "17",
"metadata": {},
"outputs": [],
"source": [
" raise Exception(msg)\n",
"\n",
"if exposure_recs:\n",
" table = exposure_adapter.day_table(exposure_recs,'date_added', dayobs_field='day_obs')\n",
" #print(table)\n",
@@ -287,10 +296,15 @@
{
"cell_type": "code",
"execution_count": null,
"id": "18",
"metadata": {},
"id": "16",
"metadata": {
"jupyter": {
"source_hidden": true
}
},
"outputs": [],
"source": [
"# Display Observation gaps\n",
"if usdf == os.environ.get('EXTERNAL_INSTANCE_URL'):\n",
" md(f\"**Warning:** The `/exposures/` endpoint is not yet functional on SERVER=usdf.\", color='red')\n",
"gaps = exposure_adapter.get_observation_gaps(min_day_obs=min_day_obs,\n",
@@ -311,7 +325,7 @@
},
{
"cell_type": "markdown",
"id": "19",
"id": "17",
"metadata": {},
"source": [
"# Narrative Log\n"
@@ -320,18 +334,23 @@
{
"cell_type": "code",
"execution_count": null,
"id": "20",
"metadata": {},
"id": "18",
"metadata": {
"jupyter": {
"source_hidden": true
}
},
"outputs": [],
"source": [
"# Get data from Narrative log. Display time log.\n",
"narrative_adapter = sad.NarrativelogAdapter(server_url=server)\n",
"narrative_url = narrative_adapter.source_url\n",
"try:\n",
" # date like '2000-01-02 12:00:00'\n",
" # str(datetime(2000, 1, 2, 12, 0, 0))\n",
" min_date = str(datetime.strptime(min_day_obs,'%Y%m%d'))\n",
" max_date = str(datetime.strptime(max_day_obs,'%Y%m%d'))\n",
" print(f'Get data from {narrative_url}: {min_date} to {max_date}')\n",
" #!print(f'Get data from {narrative_url}: {min_date} to {max_date}')\n",
" narrative_recs,url = narrative_adapter.get_messages(\n",
" limit=limit,\n",
" min_date_end=min_date,\n",
@@ -342,17 +361,11 @@
" msg = f'ERROR getting records from {url}: {err=}'\n",
" raise Exception(msg)\n",
"\n",
"print(f'Retrieved {len(narrative_recs)} records.')"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "21",
"metadata": {},
"outputs": [],
"source": [
"# print(f'Retrieved {len(narrative_recs)} records.')\n",
"\n",
"if narrative_recs:\n",
" md('Warning: Some text of Narrative log message may confuse markdown rendering.',\n",
" color='mediumblue')\n",
" table = narrative_adapter.day_table(narrative_recs, 'date_added')\n",
" #print(tabstr)\n",
" #mdlist(table, color=\"darkblue\")\n",
@@ -365,18 +378,25 @@
{
"cell_type": "code",
"execution_count": null,
"id": "22",
"metadata": {},
"id": "19",
"metadata": {
"jupyter": {
"source_hidden": true
}
},
"outputs": [],
"source": [
"# Conditionally display our current ability to connect to all needed endpoints.\n",
"if not os.environ.get('EXTERNAL_INSTANCE_URL'):\n",
" md('# Dashboard')\n",
" md('(This is not done when running under Times Square.)')\n",
" %run ./dashboard.ipynb"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "23",
"id": "20",
"metadata": {},
"outputs": [],
"source": []
16 changes: 14 additions & 2 deletions notebooks_tsqr/dashboard.ipynb
@@ -17,10 +17,10 @@
"metadata": {},
"outputs": [],
"source": [
"Dashboard().report()\n",
"score, working = Dashboard().report()\n",
"# On 9/12/2024 this gets two failed connects. \n",
"# The are usdf: exposurelog/instruments,exposurelog/exposures\n",
"# When usdfdev if fully functional, there should be zero failed connects."
"# When usdfdev if fully functional, there should be zero failed connects (with both VPNs active)."
]
},
{
Expand All @@ -29,6 +29,18 @@
"id": "2",
"metadata": {},
"outputs": [],
"source": [
"print(f'{score=:.0%}')\n",
"print('Servers that are fully functional for Logging and Reporting:')\n",
"print('\\t','\\n\\t'.join(working))"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "3",
"metadata": {},
"outputs": [],
"source": []
}
],
14 changes: 10 additions & 4 deletions python/lsst/ts/logging_and_reporting/dashboard.py
@@ -27,12 +27,18 @@ def report(self, timeout=None):
RETURN: percentage of good connections.
"""
url_status = dict()
url_status = dict() # url_status[endpoint_url] = http_status_code
working = set() # Set of servers that work for all our required endpoints.

for env,server in self.envs.items():
server_all_good = True
for adapter in self.adapters:
service = adapter(server_url=server)
# url_status[endpoint_url] = http_status_code
url_status.update(service.check_endpoints(timeout=timeout))
stats, adapter_all_good = service.check_endpoints(timeout=timeout)
url_status.update(stats)
server_all_good &= adapter_all_good
if server_all_good:
working.add(server)

total_cnt = good_cnt = 0
good = list()
@@ -62,5 +68,5 @@ def report(self, timeout=None):
good_urls=good,
bad_ursl=bad,
)
return good_cnt/total_cnt
return good_cnt/total_cnt, working
# END: class Dashboard
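For reference, a hedged sketch of how the new two-value return from `Dashboard.report()` is meant to be consumed; it mirrors the dashboard.ipynb cell changed above. The import path is assumed from the location of dashboard.py and may differ.

```python
# Sketch: consuming the (score, working) pair now returned by Dashboard.report().
# Import path assumed from python/lsst/ts/logging_and_reporting/dashboard.py.
from lsst.ts.logging_and_reporting.dashboard import Dashboard

# score: fraction of good endpoint connects; working: servers good on all endpoints
score, working = Dashboard().report()
print(f'{score=:.0%}')
print('Servers that are fully functional for Logging and Reporting:')
print('\t', '\n\t'.join(working))
```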