From f2a576537c9e4c00678628e4713d8db51363548e Mon Sep 17 00:00:00 2001 From: Steve Pothier Date: Fri, 11 Oct 2024 06:44:18 -0700 Subject: [PATCH] draft matching version 6 of storyboard --- notebooks_tsqr/NightLog.ipynb | 360 ++++++++++++------ .../ts/logging_and_reporting/all_sources.py | 22 +- .../lsst/ts/logging_and_reporting/reports.py | 3 +- .../logging_and_reporting/source_adapters.py | 80 +++- 4 files changed, 327 insertions(+), 138 deletions(-) diff --git a/notebooks_tsqr/NightLog.ipynb b/notebooks_tsqr/NightLog.ipynb index 986ef6c..ab855fd 100644 --- a/notebooks_tsqr/NightLog.ipynb +++ b/notebooks_tsqr/NightLog.ipynb @@ -5,7 +5,8 @@ "id": "0", "metadata": {}, "source": [ - "# Initialize" + "# Initialize\n", + "*(align with storyboard Version 6)*" ] }, { @@ -25,7 +26,7 @@ "# day_obs values: TODAY, YESTERDAY, YYYY-MM-DD\n", "# Report on observing nights that start upto but not included this day.\n", "#!day_obs = '2024-09-25' # Value to use for local testing (Summit)\n", - "day_obs = \"2024-09-24\" # TODO Change to 'YESTERDAY' to test with default before push\n", + "day_obs = \"2024-09-25\" # TODO Change to 'YESTERDAY' to test with default before push\n", "\n", "# Total number of days of data to display (ending on day_obs)\n", "number_of_days = \"1\" # TODO Change to '1' to test with default before push" @@ -79,9 +80,7 @@ "\n", " have_consdb = True\n", "except:\n", - " have_consdb = False\n", - "\n", - "print(f\"{have_consdb = }\")" + " have_consdb = False" ] }, { @@ -117,7 +116,11 @@ "cell_type": "code", "execution_count": null, "id": "4", - "metadata": {}, + "metadata": { + "jupyter": { + "source_hidden": true + } + }, "outputs": [], "source": [ "# Set default env to \"usdf\" and try before PUSH to repo.\n", @@ -136,11 +139,23 @@ "cell_type": "code", "execution_count": null, "id": "5", - "metadata": {}, + "metadata": { + "jupyter": { + "source_hidden": true + } + }, "outputs": [], "source": [ "# Read records from (almost) all sources\n", - "allsrc = AllSources(server_url=server)" + "allsrc = AllSources(\n", + " server_url=server,\n", + " min_dayobs=min_day_obs,\n", + " max_dayobs=max_day_obs,\n", + ")\n", + "\n", + "nr_rep = rep.NightlyLogReport(adapter=allsrc.nig_src)\n", + "exposure_rep = rep.ExposurelogReport(adapter=allsrc.exp_src)\n", + "narrative_rep = rep.NarrativelogReport(adapter=allsrc.nar_src)" ] }, { @@ -149,181 +164,166 @@ "metadata": {}, "source": [ "# Table of Contents\n", - "* [Overview of NightLog Report](#overview)\n", - "* [DDV](#ddv)\n", + "(TODO: update to reflect new content and ordering change)\n", "* [Almanac](#almanac)\n", + "* [Summary plots of whole night](#Summary-Plots)\n", + "* [Summary Scalars for the night](#Summary-Scalars)\n", "* [Night Report](#Night-Report)\n", + "* [Jira Tickets](#Jira-Tickets)\n", + "* [BLOCKS Observed](#BLOCKS-Observed)\n", "* [Exposure Log](#Exposure-Log)\n", - "* [Narrative Log](#Narrative-Log)" - ] - }, - { - "cell_type": "markdown", - "id": "7", - "metadata": {}, - "source": [ - "# Overview \n", - "" + "* [Narrative Log](#Narrative-Log)\n", + "* [Specific Plots](#Specific-Plots)\n", + "* [Developer Only](#dev-only)" ] }, { "cell_type": "code", "execution_count": null, - "id": "8", + "id": "7", "metadata": {}, "outputs": [], "source": [ - "# Display overview of Report context\n", + "instrum_str = \", \".join(list(allsrc.exp_src.instruments.keys()))\n", "md(\n", - " f\"\"\"## Project-Wide Night(s) Report \n", - "- Run on logs and databases from **{server}/**\n", - "- Report **{days} observing night(s)** with the last reported 
night starting on **{date}**.\n", - "- This report will include available data from noon **{min_date}** to noon **{max_date}**.\n", - "- Using ***Prototype* Logging and Reporting** Version: **{lrversion}**\n", - "\"\"\"\n", + " f\"# Showing data for **{min_date.date()}** to **{max_date.date()}** for {instrum_str}\"\n", ")" ] }, { "cell_type": "markdown", - "id": "9", + "id": "8", "metadata": {}, "source": [ - "### This report uses the following data sources\n", - "- NightReport\n", - "- Exposurelog\n", - "- Narrativelog\n", - "- EFD\n", - "- ConsDB\n", - "- (DDV)\n", - "- (Almanac from Astroplan)" + "\n", + "## Almanac" ] }, { - "cell_type": "markdown", - "id": "10", + "cell_type": "code", + "execution_count": null, + "id": "9", "metadata": {}, + "outputs": [], "source": [ - "\n", - "# DDV " + "# Display various almanac values (for moon, sun)\n", + "rep.AlmanacReport().day_obs_report(min_day_obs)" ] }, { "cell_type": "code", "execution_count": null, - "id": "11", - "metadata": {}, + "id": "10", + "metadata": { + "jupyter": { + "source_hidden": true + } + }, "outputs": [], "source": [ - "DDV = (\n", - " f\"{server}/rubintv-dev/ddv/index.html\"\n", - " if \"summit\" in server\n", - " else f\"{server}/rubintv/ddv/index.html\"\n", + "# Load all the data sources\n", + "allsrc = AllSources(\n", + " server_url=server,\n", + " max_dayobs=max_day_obs,\n", + " min_dayobs=min_day_obs,\n", ")\n", - "md(f\"Access DDV part of RubinTV: {DDV}\")" + "nr_rep = rep.NightlyLogReport(adapter=allsrc.nig_src)" ] }, { "cell_type": "markdown", - "id": "12", + "id": "11", "metadata": {}, "source": [ - "\n", - "# Almanac" + "\n", + "## Summary plots of whole night " ] }, { - "cell_type": "code", - "execution_count": null, - "id": "13", + "cell_type": "markdown", + "id": "12", "metadata": {}, - "outputs": [], "source": [ - "# Display various almanac values (for moon, sun)\n", - "rep.AlmanacReport().day_obs_report(min_day_obs)" + "(content not yet defined in storyboard)" ] }, { "cell_type": "code", "execution_count": null, - "id": "14", + "id": "13", "metadata": {}, "outputs": [], "source": [ - "# Load all the data sources\n", - "allsrc = AllSources(\n", - " server_url=server,\n", - " max_dayobs=max_day_obs,\n", - " min_dayobs=min_day_obs,\n", - ")" + "# Exposure Tally\n", + "md('')\n", + "md(f\"## Summary Scalars for the night \")\n", + "md(\"(all available instruments)\")\n", + "tally = await allsrc.night_tally_observation_gaps()\n", + "if tally:\n", + " display(pd.DataFrame(tally))" ] }, { "cell_type": "markdown", - "id": "15", + "id": "14", "metadata": {}, "source": [ - "# Night Report " + "## Night Report " ] }, { "cell_type": "code", "execution_count": null, - "id": "16", + "id": "15", "metadata": {}, "outputs": [], "source": [ "# NR Report\n", - "nr_rep = rep.NightlyLogReport(adapter=allsrc.nig_src)\n", "\n", - "# Overview\n", - "nr_rep.overview()\n", + "# Display time log\n", + "nr_rep.time_log_as_markdown()\n", + "md(\"-------------\")\n", + "md(\"(TODO: better Jira tickets, BLOCKS observed)\")\n", + "\n", "\n", "# Display Jira BLOCKS\n", "front = \"https://rubinobs.atlassian.net/projects/BLOCK?selectedItem=com.atlassian.plugins.atlassian-connect-plugin:com.kanoah.test-manager__main-project-page#!/\"\n", "tickets = allsrc.nig_src.nightly_tickets()\n", "if tickets:\n", - " mdstr = \"## Nightly Jira BLOCKs\"\n", + " mdstr = \"#### Nightly Jira BLOCKs\"\n", " for day, url_list in tickets.items():\n", " mdstr += f\"\\n- {day}\"\n", " for ticket_url in url_list:\n", " mdstr += f'\\n - 
[{ticket_url.replace(front,\"\")}]({ticket_url})'\n", " md(mdstr)\n", "else:\n", - " md(f\"No jira BLOCK tickets found.\", color=\"lightblue\")\n", - " md(f\"Used: [API Data]({allsrc.nig_src.source_url})\")\n", - "\n", - "# Display time log\n", - "nr_rep.time_log_as_markdown()\n", - "md(\"-------------\")" + " md(f\"No jira BLOCK tickets found using: [API Data]({allsrc.nig_src.source_url})\")" ] }, { "cell_type": "markdown", - "id": "17", + "id": "16", "metadata": {}, "source": [ - "# Exposure Log" + "## Exposure Log" ] }, { "cell_type": "code", "execution_count": null, - "id": "18", - "metadata": {}, + "id": "17", + "metadata": { + "jupyter": { + "source_hidden": true + } + }, "outputs": [], "source": [ "# Exposure Report\n", - "exposure_rep = rep.ExposurelogReport(adapter=allsrc.exp_src)\n", "\n", - "# Overview\n", - "exposure_rep.overview()\n", - "\n", - "# Exposure Tally\n", - "md(f\"## Exposure Tally for all Instruments\")\n", - "tally = await allsrc.night_tally_observation_gaps()\n", - "if tally:\n", - " display(pd.DataFrame(tally))\n", + "# Time Log\n", + "exposure_rep.time_log_as_markdown()\n", + "md(\"-------------\")\n", "\n", "# Observation gaps\n", "gaps = allsrc.exp_src.get_observation_gaps()\n", @@ -338,34 +338,57 @@ " df.plot.bar(x=\"day\", y=\"minutes\", title=f\"{instrument=!s}\")\n", "else:\n", " md(f\"No Observation Gaps found.\", color=\"lightblue\")\n", - " md(f\"Used: [API Data]({allsrc.exp_src.source_url})\")\n", - "\n", - "# Time Log\n", - "exposure_rep.time_log_as_markdown()\n", - "md(\"-------------\")" + " md(f\"Used: [API Data]({allsrc.exp_src.source_url})\")" ] }, { - "cell_type": "markdown", - "id": "19", - "metadata": {}, + "cell_type": "code", + "execution_count": null, + "id": "18", + "metadata": { + "jupyter": { + "source_hidden": true + } + }, + "outputs": [], "source": [ - "# Narrative Log\n" + "allsrc.min_dayobs, allsrc.max_dayobs, allsrc.server_url" ] }, { "cell_type": "code", "execution_count": null, + "id": "19", + "metadata": { + "jupyter": { + "source_hidden": true + } + }, + "outputs": [], + "source": [ + "[len(recs) for recs in allsrc.exp_src.exposures.values()]" + ] + }, + { + "cell_type": "markdown", "id": "20", "metadata": {}, + "source": [ + "## Narrative Log\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "21", + "metadata": { + "jupyter": { + "source_hidden": true + } + }, "outputs": [], "source": [ "# Narrative Report\n", - "narrative_rep = rep.NarrativelogReport(adapter=allsrc.nar_src)\n", - "\n", - "# Overview\n", - "narrative_rep.overview\n", - "\n", "# Time Log\n", "narrative_rep.time_log_as_markdown()\n", "md(\"-------------\")" @@ -373,15 +396,101 @@ }, { "cell_type": "markdown", - "id": "21", + "id": "22", "metadata": {}, "source": [ - "# Developer Only Section" + "\n", + "## Specific Plots \n", + "(content not yet defined in storyboard)" ] }, { "cell_type": "markdown", - "id": "22", + "id": "23", + "metadata": {}, + "source": [ + "\n", + "# Developer Only Section \n", + "Contains stuff only expected to be useful to developers.\n", + "\n", + "May also contain sections that have moved out of he user section because they are not defined in the Storyboard." 
+ ] + }, + { + "cell_type": "markdown", + "id": "24", + "metadata": {}, + "source": [ + "## Overview \n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "25", + "metadata": { + "jupyter": { + "source_hidden": true + } + }, + "outputs": [], + "source": [ + "# Display overview of Report context\n", + "md(\n", + " f\"\"\"## Project-Wide Night(s) Report \n", + "- Run on logs and databases from **{server}/**\n", + "- Report **{days} observing night(s)** with the last reported night starting on **{date}**.\n", + "- This report will include available data from noon **{min_date}** to noon **{max_date}**.\n", + "- Using ***Prototype* Logging and Reporting** Version: **{lrversion}**\n", + "\"\"\"\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "26", + "metadata": {}, + "source": [ + "## This report uses the following data sources\n", + "- NightReport\n", + "- Exposurelog\n", + "- Narrativelog\n", + "- EFD\n", + "- ConsDB\n", + "- (DDV)\n", + "- (Almanac from Astroplan)" + ] + }, + { + "cell_type": "markdown", + "id": "27", + "metadata": {}, + "source": [ + "## DDV " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "28", + "metadata": { + "jupyter": { + "source_hidden": true + } + }, + "outputs": [], + "source": [ + "DDV = (\n", + " f\"{server}/rubintv-dev/ddv/index.html\"\n", + " if \"summit\" in server\n", + " else f\"{server}/rubintv/ddv/index.html\"\n", + ")\n", + "md(f\"Access DDV part of RubinTV: {DDV}\")" + ] + }, + { + "cell_type": "markdown", + "id": "29", "metadata": {}, "source": [ "## Where was this run?\n", @@ -394,10 +503,33 @@ "However, Times Square does not run on the Summit. It does run on USDF-dev.\n" ] }, + { + "cell_type": "markdown", + "id": "30", + "metadata": {}, + "source": [ + "## Section overviews moved here" + ] + }, { "cell_type": "code", "execution_count": null, - "id": "23", + "id": "31", + "metadata": {}, + "outputs": [], + "source": [ + "# Night Report Overview\n", + "nr_rep.overview()\n", + "# Exposure Report Overview\n", + "exposure_rep.overview()\n", + "# Narrative Report Overview\n", + "narrative_rep.overview()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "32", "metadata": {}, "outputs": [], "source": [ @@ -410,17 +542,21 @@ }, { "cell_type": "markdown", - "id": "24", + "id": "33", "metadata": {}, "source": [ - "# Finale" + "## Finale" ] }, { "cell_type": "code", "execution_count": null, - "id": "25", - "metadata": {}, + "id": "34", + "metadata": { + "jupyter": { + "source_hidden": true + } + }, "outputs": [], "source": [ "print(f\"Finished {str(dt.datetime.now())}\")" diff --git a/python/lsst/ts/logging_and_reporting/all_sources.py b/python/lsst/ts/logging_and_reporting/all_sources.py index c320811..f38709a 100644 --- a/python/lsst/ts/logging_and_reporting/all_sources.py +++ b/python/lsst/ts/logging_and_reporting/all_sources.py @@ -40,6 +40,8 @@ def __init__( ) # This space for rent by ConsDB + self.server_url = server_url + # Get the common min/max date/dayobs from just one source. # They are the same for all of them. self.max_date = self.nig_src.max_date @@ -66,7 +68,7 @@ def __init__( # day_obs:: YYYMMDD (int or str) # Use almanac begin of night values for day_obs. # Use almanac end of night values for day_obs + 1. 
- async def night_tally_observation_gaps(self, verbose=True): + async def night_tally_observation_gaps(self, verbose=False): instrument_tally = dict() # d[instrument] = tally_dict almanac = alm.Almanac(dayobs=self.min_dayobs) @@ -85,6 +87,12 @@ async def night_tally_observation_gaps(self, verbose=True): num_slews = targets[["slewTime"]].astype(bool).sum(axis=0).squeeze() total_slew_seconds = targets[["slewTime"]].sum().squeeze() + # per Merlin: There is no practical way to get actual detector read + # time. He has done some experiments and inferred that it is + # 2.3 seconds. He recommends hardcoding the value. + mean_detector_hrs = 2.3 / (60 * 60.0) + + # Scot says care only about: ComCam, LSSTCam and Latiss for instrument, records in self.exp_src.exposures.items(): exposure_seconds = 0 for rec in records: @@ -92,28 +100,30 @@ async def night_tally_observation_gaps(self, verbose=True): end = dt.datetime.fromisoformat(rec["timespan_end"]) exposure_seconds += (end - begin).total_seconds() num_exposures = len(records) + detector_hrs = len(records) * mean_detector_hrs + exposure_hrs = exposure_seconds / (60 * 60.0) - slew_hrs = total_slew_seconds / (60 * 60) + slew_hrs = total_slew_seconds / (60 * 60.0) idle_hrs = ( total_observable_hrs - exposure_hrs # - detector_read_hrs - slew_hrs ) + instrument_tally[instrument] = { "Total Night (HH:MM:SS)": hhmmss(total_observable_hrs), # (a) "Total Exposure (HH:MM:SS)": hhmmss(exposure_hrs), # (b) "Number of exposures": num_exposures, # (c) "Number of slews": num_slews, # (d) - "Total Detector Read (HH:MM:SS)": "NA", # (e) UNKNOWN SOURCE - "Mean Detector Read (HH:MM:SS)": "NA", # (f=e/c) + "Total Detector Read (HH:MM:SS)": hhmmss(detector_hrs), # (e) + # Next: (f=e/c) + "Mean Detector Read (HH:MM:SS)": hhmmss(mean_detector_hrs), "Total Slew (HH:MM:SS)": hhmmss(slew_hrs), # (g) "Mean Slew (HH:MM:SS)": hhmmss(slew_hrs / num_slews), # (g/d) "Total Idle (HH:MM:SS)": hhmmss(idle_hrs), # (i=a-b-e-g) } - # get_detector_reads()?? # UNKNOWN SOURCE - # Composition to combine Exposure and Efd (blackboard) # ts_xml/.../sal_interfaces/Scheduler/Scheduler_Events.xml # https://ts-xml.lsst.io/sal_interfaces/Scheduler.html#slewtime diff --git a/python/lsst/ts/logging_and_reporting/reports.py b/python/lsst/ts/logging_and_reporting/reports.py index 7b42b57..98c7f99 100644 --- a/python/lsst/ts/logging_and_reporting/reports.py +++ b/python/lsst/ts/logging_and_reporting/reports.py @@ -81,7 +81,7 @@ def overview(self): more = "(There may be more.)" if count >= adapter.limit else "" result = error if error else f"Got {count} records. 
" - print(md(f"## Overview for Service: `{adapter.service}` [{count}]")) + print(md(f"### Overview for Service: `{adapter.service}` [{count}]")) print(md(f'- Endpoint: {status["endpoint_url"]}')) print(f"- {result} {more}") @@ -96,7 +96,6 @@ def time_log_as_markdown( service = adapter.service url = adapter.get_status().get("endpoint_url") if records: - md("## Time Log") table = self.source_adapter.day_table("date_added") mdlist(table) else: diff --git a/python/lsst/ts/logging_and_reporting/source_adapters.py b/python/lsst/ts/logging_and_reporting/source_adapters.py index aa76a1c..d8a291e 100644 --- a/python/lsst/ts/logging_and_reporting/source_adapters.py +++ b/python/lsst/ts/logging_and_reporting/source_adapters.py @@ -139,6 +139,7 @@ def row_str_func(self, datetime_str, rec): msg = rec["message_text"].strip() return f"`{datetime_str}`\n```\n{msg}\n```" + # ABC def day_table( self, datetime_field, @@ -321,7 +322,9 @@ def telescope(rec): table.append(f"{self.row_str_func(attrstr, rec)}") crew_list = rec.get("observers_crew", []) crew_str = ", ".join(crew_list) - table.append(f"*Observer Crew: {crew_str}*") + status = rec.get("telescope_status", "NA") + table.append(f"Telescope Status: *{status}*") + table.append(f"Observer Crew: *{crew_str}*") return table def row_str_func(self, datetime_str, rec): @@ -574,13 +577,14 @@ def __init__( # status[endpoint] = dict(endpoint_url, number_of_records, error) self.status = dict() - self.instruments = list() # [instrument, ...] + self.instruments = dict() # dict[instrument] = registry + self.exposures = dict() # dd[instrument] = [rec, ...] # Load the data (records) we need from relevant endpoints # in dependency order. self.status["instruments"] = self.get_instruments() - for instrument in self.instruments: + for instrument in self.instruments.keys(): endpoint = f"exposures.{instrument}" self.status[endpoint] = self.get_exposures(instrument) if self.min_date: @@ -589,8 +593,8 @@ def __init__( @property def row_header(self): return ( - "| Time | OBS ID | Telescope | Message |\n" - "|------|--------|-----------|---------|" + "| Time | OBS ID | Instrument | Message |\n" + "|------|--------|------------|---------|" ) def row_str_func(self, datetime_str, rec): @@ -603,6 +607,47 @@ def row_str_func(self, datetime_str, rec): f"\n```\n{msg}\n```" ) + # Exposurelog + def day_table( + self, + datetime_field, + dayobs_field=None, + row_str_func=None, + zero_message=False, + ): + """Break on INSTRUMENT, DATE. Within that only show time.""" + + def obs_night(rec): + if "day_obs" in rec: + return ut.dayobs_str(rec["day_obs"]) # -> # "YYYY-MM-DD" + else: + dt = datetime.fromisoformat(rec[datetime_field]) + return ut.datetime_to_dayobs(dt) + + def obs_date(rec): + dt = datetime.fromisoformat(rec[datetime_field]) + return dt.replace(microsecond=0) + + def instrument(rec): + return rec["instrument"] + + recs = self.records + if len(recs) == 0: + if zero_message: + print("Nothing to display.") + return + + table = list() + # Sort by INSTRUMENT, then by OBS_DATE. 
+ recs = sorted(recs, key=obs_date) + recs = sorted(recs, key=instrument) + for instrum, g0 in itertools.groupby(recs, key=instrument): + table.append(f"### Instrument: {instrum}") + for rec in g0: + attrstr = f'{str(obs_date(rec))} {rec.get("user_id")}' + table.append(f"{self.row_str_func(attrstr, rec)}") + return table + def check_endpoints(self, timeout=None, verbose=True): to = timeout or self.timeout if verbose: @@ -635,9 +680,11 @@ def get_instruments(self): except Exception as err: error = str(err) else: - # Flatten the lists - vals = recs.values() - self.instruments = list(itertools.chain.from_iterable(vals)) + self.instruments = { + instrum: int(reg.replace("butler_instruments_", "")) + for reg, inst_list in recs.items() + for instrum in inst_list + } status = dict( endpoint_url=url, number_of_records=len(recs), @@ -647,8 +694,9 @@ def get_instruments(self): # RETURNS status: dict[endpoint_url, number_of_records, error] # SIDE-EFFECT: puts records in self.exposures - def get_exposures(self, instrument, registry=1): - qparams = dict(instrument=instrument, registery=registry) + def get_exposures(self, instrument, verbose=False): + registry = self.instruments[instrument] + qparams = dict(instrument=instrument, registry=registry) if self.min_dayobs: qparams["min_day_obs"] = ut.dayobs_int(self.min_dayobs) if self.max_dayobs: @@ -657,6 +705,8 @@ def get_exposures(self, instrument, registry=1): recs = [] error = None try: + if verbose: + print(f"DBG get_exposures {url=}") recs = requests.get(url, timeout=self.timeout).json() except Exception as err: error = str(err) @@ -722,19 +772,13 @@ def get_records( ) return status - def get_observation_gaps(self, instruments=None): + def get_observation_gaps(self): def day_func(r): return r["day_obs"] - if not instruments: - instruments = self.instruments - # TODO user specified list of instruments must be subset - assert isinstance( - instruments, list - ), f'"instruments" must be a list. Got {instruments!r}' # inst_day_rollup[instrument] => dict[day] => exposureGapInMinutes inst_day_rollup = defaultdict(dict) # Instrument/Day rollup - for instrum in instruments: + for instrum in self.instruments.keys(): recs = self.exposures[instrum] instrum_gaps = dict() for day, dayrecs in itertools.groupby(recs, key=day_func):
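

For reference, a small self-contained sketch of the per-instrument tally arithmetic that `night_tally_observation_gaps` performs in `all_sources.py` above. This is an illustration, not the module's API: `hhmmss` is re-implemented here on the assumption that it formats fractional hours as `HH:MM:SS`, the 2.3 s detector read time is the hardcoded value recommended in the patch comment, and the inputs are plain numbers rather than the EFD `targets` frame and almanac values the real method uses.

```python
import datetime as dt

READ_TIME_SEC = 2.3  # hardcoded per the patch; no practical way to query the actual read time


def hhmmss(hours: float) -> str:
    """Render fractional hours as HH:MM:SS (stand-in for the module's helper)."""
    total = int(round(hours * 3600))
    return f"{total // 3600:02d}:{(total % 3600) // 60:02d}:{total % 60:02d}"


def tally_night(exposure_recs, slew_times_sec, total_observable_hrs):
    """Mirror the arithmetic of night_tally_observation_gaps for one instrument.

    exposure_recs: dicts with ISO 'timespan_begin'/'timespan_end' strings
    slew_times_sec: slew durations in seconds (zeros count as "no slew")
    total_observable_hrs: observable night length in hours (from the almanac)
    """
    exposure_seconds = sum(
        (
            dt.datetime.fromisoformat(r["timespan_end"])
            - dt.datetime.fromisoformat(r["timespan_begin"])
        ).total_seconds()
        for r in exposure_recs
    )
    num_exposures = len(exposure_recs)
    num_slews = sum(1 for s in slew_times_sec if s)  # count non-zero slews only
    exposure_hrs = exposure_seconds / 3600.0
    slew_hrs = sum(slew_times_sec) / 3600.0
    mean_read_hrs = READ_TIME_SEC / 3600.0
    read_hrs = num_exposures * mean_read_hrs
    # As in the patch, detector read time is tallied but not subtracted from idle time.
    idle_hrs = total_observable_hrs - exposure_hrs - slew_hrs
    return {
        "Total Night (HH:MM:SS)": hhmmss(total_observable_hrs),
        "Total Exposure (HH:MM:SS)": hhmmss(exposure_hrs),
        "Number of exposures": num_exposures,
        "Number of slews": num_slews,
        "Total Detector Read (HH:MM:SS)": hhmmss(read_hrs),
        "Mean Detector Read (HH:MM:SS)": hhmmss(mean_read_hrs),
        "Total Slew (HH:MM:SS)": hhmmss(slew_hrs),
        # Guard added in this sketch; the patch divides unconditionally.
        "Mean Slew (HH:MM:SS)": hhmmss(slew_hrs / num_slews) if num_slews else "NA",
        "Total Idle (HH:MM:SS)": hhmmss(idle_hrs),
    }
```

Similarly, the reworked `get_instruments` in `source_adapters.py` now keeps a mapping from instrument name to Butler registry number instead of a flat list, which is what lets `get_exposures` look up `registry = self.instruments[instrument]`. The payload shape below is inferred from the dict comprehension in the patch (keys of the form `butler_instruments_<N>`), and the instrument names are only examples; the real `/instruments` response may differ.

```python
# Hypothetical endpoint payload: registry key -> list of instrument names.
recs = {
    "butler_instruments_1": ["LATISS", "LSSTComCam"],
    "butler_instruments_2": ["LSSTCam"],
}
instruments = {
    instrum: int(reg.replace("butler_instruments_", ""))
    for reg, inst_list in recs.items()
    for instrum in inst_list
}
# instruments == {"LATISS": 1, "LSSTComCam": 1, "LSSTCam": 2}
```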