diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index fd37f258..acb6c4df 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -22,6 +22,6 @@ repos: name: isort (python) - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: v0.0.278 + rev: v0.3.4 hooks: - - id: ruff \ No newline at end of file + - id: ruff diff --git a/pyproject.toml b/pyproject.toml index 0d87849f..06e8df62 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,7 +9,7 @@ name = "rubin-sim" description = "Scheduler, survey strategy analysis, and other simulation tools for Rubin Observatory." readme = "README.md" license = { text = "GPL" } -classifiers = [ +classifiers = [ "Intended Audience :: Science/Research", "Operating System :: OS Independent", "Programming Language :: Python :: 3", @@ -143,4 +143,3 @@ max-doc-length = 79 [tool.ruff.lint.pydocstyle] convention = "numpy" - diff --git a/rubin_sim/maf/batches/glance_batch.py b/rubin_sim/maf/batches/glance_batch.py index 1c2d99d7..fa9005ec 100644 --- a/rubin_sim/maf/batches/glance_batch.py +++ b/rubin_sim/maf/batches/glance_batch.py @@ -106,9 +106,12 @@ def glanceBatch( bundle_list.append(bundle) # Total effective exposure time - metric = metrics.TeffMetric(m5_col=colmap["fiveSigmaDepth"], filter_col=colmap["filter"], normed=True) + metric = metrics.MeanMetric(col="t_eff") + teff_stacker = stackers.TeffStacker(normed=True) for sql in sql_per_and_all_filters: - bundle = metric_bundles.MetricBundle(metric, slicer, sql, display_dict=displayDict) + bundle = metric_bundles.MetricBundle( + metric, slicer, sql, stacker_list=[teff_stacker], display_dict=displayDict + ) bundle_list.append(bundle) # Number of observations, all and each filter diff --git a/rubin_sim/maf/batches/visitdepth_batch.py b/rubin_sim/maf/batches/visitdepth_batch.py index 50b5459c..ca10ae9d 100644 --- a/rubin_sim/maf/batches/visitdepth_batch.py +++ b/rubin_sim/maf/batches/visitdepth_batch.py @@ -251,12 +251,7 @@ def tEffMetrics( displayDict = {"group": "T_eff Summary", "subgroup": subgroup} displayDict["caption"] = "Total effective time of the survey (see Teff metric)." displayDict["order"] = 0 - metric = metrics.TeffMetric( - m5_col=colmap["fiveSigmaDepth"], - filter_col=colmap["filter"], - normed=False, - metric_name="Total Teff", - ) + metric = metrics.SumMetric(col="t_eff", metric_name="Total Teff") slicer = slicers.UniSlicer() bundle = mb.MetricBundle( metric, @@ -269,17 +264,14 @@ def tEffMetrics( displayDict["caption"] = "Normalized total effective time of the survey (see Teff metric)." 
displayDict["order"] = 1 - metric = metrics.TeffMetric( - m5_col=colmap["fiveSigmaDepth"], - filter_col=colmap["filter"], - normed=True, - metric_name="Normalized Teff", - ) + metric = metrics.MeanMetric(col="t_eff", metric_name="Normalized Teff") + normalized_teff_stacker = stackers.TeffStacker(normed=True) slicer = slicers.UniSlicer() bundle = mb.MetricBundle( metric, slicer, constraint=sqls["all"], + stacker_list=[normalized_teff_stacker], display_dict=displayDict, info_label=info_label["all"], ) @@ -288,12 +280,8 @@ def tEffMetrics( # Generate Teff maps in all and per filters displayDict = {"group": "T_eff Maps", "subgroup": subgroup} - metric = metrics.TeffMetric( - m5_col=colmap["fiveSigmaDepth"], - filter_col=colmap["filter"], - normed=True, - metric_name="Normalized Teff", - ) + metric = metrics.MeanMetric(col="t_eff", metric_name="Normalized Teff") + normalized_teff_stacker = stackers.TeffStacker(normed=True) for f in filterlist: displayDict["caption"] = "Normalized effective time of the survey, for %s" % info_label[f] displayDict["order"] = orders[f] @@ -302,6 +290,7 @@ def tEffMetrics( metric, skyslicer, sqls[f], + stacker_list=[normalized_teff_stacker], info_label=info_label[f], display_dict=displayDict, plot_dict=plotDict, diff --git a/rubin_sim/maf/metrics/technical_metrics.py b/rubin_sim/maf/metrics/technical_metrics.py index 559a87a4..78bfe1a5 100644 --- a/rubin_sim/maf/metrics/technical_metrics.py +++ b/rubin_sim/maf/metrics/technical_metrics.py @@ -3,7 +3,6 @@ "MinTimeBetweenStatesMetric", "NStateChangesFasterThanMetric", "MaxStateChangesWithinMetric", - "TeffMetric", "OpenShutterFractionMetric", "BruteOSFMetric", ) @@ -186,80 +185,6 @@ def run(self, data_slice, slice_point=None): return nchanges.max() -class TeffMetric(BaseMetric): - """ - Effective time equivalent for a given set of visits. - """ - - def __init__( - self, - m5_col="fiveSigmaDepth", - filter_col="filter", - metric_name="tEff", - fiducial_depth=None, - teff_base=30.0, - normed=False, - **kwargs, - ): - self.m5_col = m5_col - self.filter_col = filter_col - if fiducial_depth is None: - # From reference von Karman 500nm zenith seeing of 0.69" - # median zenith dark seeing from sims_skybrightness_pre - # airmass = 1 - # 2 "snaps" of 15 seconds each - # m5_flat_sed sysEngVals from rubin_sim - # commit 6d03bd49550972e48648503ed60784a4e6775b82 (2021-05-18) - # These include constants from: - # https://github.com/lsst-pst/syseng_throughputs/blob/master/notebooks/generate_sims_values.ipynb - # commit 7abb90951fcbc70d9c4d0c805c55a67224f9069f (2021-05-05) - # See https://github.com/lsst-sims/smtn-002/blob/master/notebooks/teff_fiducial.ipynb - self.depth = { - "u": 23.71, - "g": 24.67, - "r": 24.24, - "i": 23.82, - "z": 23.21, - "y": 22.40, - } - else: - if isinstance(fiducial_depth, dict): - self.depth = fiducial_depth - else: - raise ValueError("fiducial_depth should be None or dictionary") - self.teff_base = teff_base - self.normed = normed - if self.normed: - units = "" - else: - units = "seconds" - super(TeffMetric, self).__init__( - col=[m5_col, filter_col], metric_name=metric_name, units=units, **kwargs - ) - if self.normed: - self.comment = "Normalized effective time" - else: - self.comment = "Effect time" - self.comment += " of a series of observations, evaluating the equivalent amount of time" - self.comment += " each observation would require if taken at a fiducial limiting magnitude." 
- self.comment += " Fiducial depths are : %s" % self.depth - if self.normed: - self.comment += " Normalized by the total amount of time actual on-sky." - - def run(self, data_slice, slice_point=None): - filters = np.unique(data_slice[self.filter_col]) - teff = 0.0 - for f in filters: - match = np.where(data_slice[self.filter_col] == f)[0] - teff += (10.0 ** (0.8 * (data_slice[self.m5_col][match] - self.depth[f]))).sum() - teff *= self.teff_base - if self.normed: - # Normalize by the t_eff equivalent if each observation - # was at the fiducial depth. - teff = teff / (self.teff_base * data_slice[self.m5_col].size) - return teff - - class OpenShutterFractionMetric(BaseMetric): """Compute the fraction of time the shutter is open compared to the total time spent observing. diff --git a/rubin_sim/maf/stackers/__init__.py b/rubin_sim/maf/stackers/__init__.py index be999fba..e3fef24d 100644 --- a/rubin_sim/maf/stackers/__init__.py +++ b/rubin_sim/maf/stackers/__init__.py @@ -1,5 +1,6 @@ from .base_stacker import * from .coord_stackers import * +from .date_stackers import * from .general_stackers import * from .get_col_info import * from .label_stackers import * @@ -9,3 +10,4 @@ from .neo_dist_stacker import * from .sdss_stackers import * from .sn_stacker import * +from .teff_stacker import * diff --git a/rubin_sim/maf/stackers/date_stackers.py b/rubin_sim/maf/stackers/date_stackers.py new file mode 100644 index 00000000..5fb4831f --- /dev/null +++ b/rubin_sim/maf/stackers/date_stackers.py @@ -0,0 +1,140 @@ +__all__ = ( + "ObservationStartDatetime64Stacker", + "DayObsStacker", + "DayObsMJDStacker", + "DayObsISOStacker", +) + +import numpy as np +from astropy.time import Time + +from .base_stacker import BaseStacker + + +class ObservationStartDatetime64Stacker(BaseStacker): + """Add the observation start time as a numpy.datetime64.""" + + cols_added = ["observationStartDatetime64"] + + def __init__( + self, + mjd_col="observationStartMJD", + ): + self.mjd_col = mjd_col + self.units = [None] + self.cols_added_dtypes = ["datetime64[ns]"] + + def _run(self, sim_data, cols_present=False): + if cols_present: + # Column already present in data; assume it is correct and does not + # need recalculating. + return sim_data + + sim_data["observationStartDatetime64"] = Time(sim_data[self.mjd_col], format="mjd").datetime64 + + return sim_data + + +def _compute_day_obs_mjd(mjd): + day_obs_mjd = np.floor(mjd - 0.5).astype("int") + return day_obs_mjd + + +def _compute_day_obs_astropy_time(mjd): + day_obs_time = Time(_compute_day_obs_mjd(mjd), format="mjd") + return day_obs_time + + +def _compute_day_obs_iso8601(mjd): + iso_times = _compute_day_obs_astropy_time(mjd).iso + + # Work both for mjd as a scalar and a numpy array + if isinstance(iso_times, str): + day_obs_iso = iso_times[:10] + else: + day_obs_iso = np.array([d[:10] for d in iso_times]) + + return day_obs_iso + + +def _compute_day_obs_int(mjd): + day_obs_int = np.array([d.replace("-", "") for d in _compute_day_obs_iso8601(mjd)]) + + return day_obs_int + + +class DayObsStacker(BaseStacker): + """Add dayObs as, as defined by SITCOMTN-32. + + Parameters + ---------- + mjd_col : `str` + The column with the observatin start MJD. + """ + + cols_added = ["dayObs"] + + def __init__(self, mjd_col="observationStartMJD"): + self.mjd_col = mjd_col + self.units = ["days"] + self.cols_added_dtypes = [int] + + def _run(self, sim_data, cols_present=False): + if cols_present: + # Column already present in data; assume it is correct and does not + # need recalculating. 
+ return sim_data
+
+ sim_data[self.cols_added[0]] = _compute_day_obs_int(sim_data[self.mjd_col])
+ return sim_data
+
+
+class DayObsMJDStacker(BaseStacker):
+ """Add dayObs defined by SITCOMTN-32, as an MJD.
+
+ Parameters
+ ----------
+ mjd_col : `str`
+ The column with the observation start MJD.
+ """
+
+ cols_added = ["day_obs_mjd"]
+
+ def __init__(self, mjd_col="observationStartMJD"):
+ self.mjd_col = mjd_col
+ self.units = ["days"]
+ self.cols_added_dtypes = [int]
+
+ def _run(self, sim_data, cols_present=False):
+ if cols_present:
+ # Column already present in data; assume it is correct and does not
+ # need recalculating.
+ return sim_data
+
+ sim_data[self.cols_added[0]] = _compute_day_obs_mjd(sim_data[self.mjd_col])
+ return sim_data
+
+
+class DayObsISOStacker(BaseStacker):
+ """Add dayObs as defined by SITCOMTN-32, in ISO 8601 format.
+
+ Parameters
+ ----------
+ mjd_col : `str`
+ The column with the observation start MJD."""
+
+ cols_added = ["day_obs_iso8601"]
+
+ def __init__(self, mjd_col="observationStartMJD"):
+ self.mjd_col = mjd_col
+ self.units = [None]
+ self.cols_added_dtypes = [(str, 10)]
+
+ def _run(self, sim_data, cols_present=False):
+ if cols_present:
+ # Column already present in data; assume it is correct and does not
+ # need recalculating.
+ return sim_data
+
+ sim_data[self.cols_added[0]] = _compute_day_obs_iso8601(sim_data[self.mjd_col])
+ return sim_data
diff --git a/rubin_sim/maf/stackers/general_stackers.py b/rubin_sim/maf/stackers/general_stackers.py
index 21f09883..7fc8c142 100644
--- a/rubin_sim/maf/stackers/general_stackers.py
+++ b/rubin_sim/maf/stackers/general_stackers.py
@@ -7,6 +7,7 @@
 "DcrStacker",
 "FiveSigmaStacker",
 "SaturationStacker",
+ "OverheadStacker",
 )

 import warnings
@@ -20,6 +21,7 @@
 from rubin_sim.maf.utils import load_inst_zeropoints

 from .base_stacker import BaseStacker
+from .date_stackers import DayObsMJDStacker


 class SaturationStacker(BaseStacker):
@@ -32,7 +34,8 @@ class SaturationStacker(BaseStacker):
 saturation_e : float, optional (150e3)
 The saturation level in electrons
 zeropoints : dict-like, optional (None)
- The zeropoints for the telescope. Keys should be str with filter names, values in mags.
+ The zeropoints for the telescope. Keys should be str with filter names,
+ values in mags.
 If None, will use Rubin-like zeropoints.
 km : dict-like, optional (None)
 Atmospheric extinction values. Keys should be str with filter names.
@@ -84,17 +87,19 @@ def _run(self, sim_data, cols_present=False):
 in_filt = np.where(sim_data[self.filter_col] == filtername)[0]
 # Calculate the length of the on-sky time per EXPOSURE
 exptime = sim_data[self.exptime_col][in_filt] / sim_data[self.nexp_col][in_filt]
- # Calculate sky counts per pixel per second from skybrightness + zeropoint (e/1s)
+ # Calculate sky counts per pixel per second
+ # from skybrightness + zeropoint (e/1s)
 sky_counts = (
 10.0 ** (0.4 * (self.zeropoints[filtername] - sim_data[self.skybrightness_col][in_filt]))
 * self.pixscale**2
 )
 # Total sky counts in each exposure
 sky_counts = sky_counts * exptime
- # The counts available to the source (at peak) in each exposure is the
- # difference between saturation and sky
+ # The counts available to the source (at peak) in each exposure are
+ # the difference between saturation and sky
 remaining_counts_peak = self.saturation_e - sky_counts
- # Now to figure out how many counts there would be total, if there are that many in the peak
+ # Now to figure out how many counts there would be in total, if there
+ # are that many in the peak
 sigma = sim_data[self.seeing_col][in_filt] / 2.354
 source_counts = remaining_counts_peak * 2.0 * np.pi * (sigma / self.pixscale) ** 2
 # source counts = counts per exposure (expTimeCol / nexp)
@@ -112,8 +117,8 @@
 class FiveSigmaStacker(BaseStacker):
- """
- Calculate the 5-sigma limiting depth for a point source in the given conditions.
+ """Calculate the 5-sigma limiting depth for a point source in the given
+ conditions.

 This is generally not needed, unless the m5 parameters have been updated
 or m5 was not previously calculated.
@@ -182,10 +187,12 @@ def __init__(
 def _run(self, sim_data, cols_present=False):
 """Calculate new column for normalized airmass."""
 # Run method is required to calculate column.
- # Driver runs getColInfo to know what columns are needed from db & which are calculated,
- # then gets data from db and then calculates additional columns (via run methods here).
+ # Driver runs getColInfo to know what columns are needed from db &
+ # which are calculated, then gets data from db and then calculates
+ # additional columns (via run methods here).
 if cols_present:
- # Column already present in data; assume it is correct and does not need recalculating.
+ # Column already present in data; assume it is correct and does not
+ # need recalculating.
 return sim_data
 dec = sim_data[self.dec_col]
 if self.degrees:
@@ -198,7 +205,8 @@
 class ZenithDistStacker(BaseStacker):
 """Calculate the zenith distance for each pointing.

- If 'degrees' is True, then assumes alt_col is in degrees and returns degrees.
+ If 'degrees' is True, then assumes alt_col is in degrees and returns
+ degrees.
 If 'degrees' is False, assumes alt_col is in radians and returns radians.
 """
@@ -216,7 +224,8 @@
 def _run(self, sim_data, cols_present=False):
 """Calculate new column for zenith distance."""
 if cols_present:
- # Column already present in data; assume it is correct and does not need recalculating.
+ # Column already present in data; assume it is correct and does not
+ # need recalculating.
 return sim_data
 if self.degrees:
 sim_data["zenithDistance"] = 90.0 - sim_data[self.alt_col]
@@ -226,7 +235,8 @@
 class ParallaxFactorStacker(BaseStacker):
- """Calculate the parallax factors for each opsim pointing.
Output parallax factor in arcseconds."""
+ """Calculate the parallax factors for each opsim pointing.
+ Output parallax factor in arcseconds."""

 cols_added = ["ra_pi_amp", "dec_pi_amp"]

@@ -245,7 +255,8 @@
 self.degrees = degrees

 def _gnomonic_project_toxy(self, ra1, dec1, r_acen, deccen):
- """Calculate x/y projection of ra1/dec1 in system with center at r_acen, Deccenp.
+ """Calculate x/y projection of ra1/dec1 in system with center at
+ r_acen, deccen.
 Input radians.
 """
 # also used in Global Telescope Network website
@@ -256,7 +267,8 @@
 def _run(self, sim_data, cols_present=False):
 if cols_present:
- # Column already present in data; assume it is correct and does not need recalculating.
+ # Column already present in data; assume it is correct and does not
+ # need recalculating.
 return sim_data
 ra_pi_amp = np.zeros(np.size(sim_data), dtype=[("ra_pi_amp", "float")])
 dec_pi_amp = np.zeros(np.size(sim_data), dtype=[("dec_pi_amp", "float")])
@@ -284,11 +296,13 @@
 class DcrStacker(BaseStacker):
- """Calculate the RA,Dec offset expected for an object due to differential chromatic refraction.
+ """Calculate the RA,Dec offset expected for an object due to differential
+ chromatic refraction.

- For DCR calculation, we also need zenithDistance, HA, and PA -- but these will be explicitly
- handled within this stacker so that setup is consistent and they run in order. If those values
- have already been calculated elsewhere, they will not be overwritten.
+ For DCR calculation, we also need zenithDistance, HA, and PA -- but these
+ will be explicitly handled within this stacker so that setup is consistent
+ and they run in order. If those values have already been calculated
+ elsewhere, they will not be overwritten.

 Parameters
 ----------
@@ -301,20 +315,23 @@
 dec_col : str
 Name of the column with Dec. Default 'fieldDec'.
 lstCol : str
- Name of the column with local sidereal time. Default 'observationStartLST'.
+ Name of the column with local sidereal time. Default
+ 'observationStartLST'.
 site : str or rubin_sim.utils.Site
 Name of the observory or a rubin_sim.utils.Site object. Default 'LSST'.
 mjdCol : str
 Name of column with modified julian date. Default 'observationStartMJD'
 dcr_magnitudes : dict
- Magitude of the DCR offset for each filter at altitude/zenith distance of 45 degrees.
- Defaults u=0.07, g=0.07, r=0.50, i=0.045, z=0.042, y=0.04 (all arcseconds).
+ Magnitude of the DCR offset for each filter at altitude/zenith distance
+ of 45 degrees. Defaults u=0.07, g=0.07, r=0.50, i=0.045, z=0.042,
+ y=0.04 (all arcseconds).

 Returns
 -------
 numpy.array
- Returns array with additional columns 'ra_dcr_amp' and 'dec_dcr_amp' with the DCR offsets
- for each observation. Also runs ZenithDistStacker and ParallacticAngleStacker.
+ Returns array with additional columns 'ra_dcr_amp' and 'dec_dcr_amp'
+ with the DCR offsets for each observation. Also runs ZenithDistStacker
+ and ParallacticAngleStacker.
 """

 cols_added = ["ra_dcr_amp", "dec_dcr_amp"] # zenithDist, HA, PA
@@ -351,8 +368,9 @@
 self.dec_col = dec_col
 self.degrees = degrees
 self.cols_req = [filter_col, ra_col, dec_col, alt_col, lst_col]
- # 'zenithDist', 'PA', 'HA' are additional columns required, coming from other stackers which must
- # also be configured -- so we handle this explicitly here.
+ # 'zenithDist', 'PA', 'HA' are additional columns required, coming
+ # from other stackers which must also be configured -- so we handle
+ # this explicitly here.
 self.zstacker = ZenithDistStacker(alt_col=alt_col, degrees=self.degrees)
 self.pastacker = ParallacticAngleStacker(
 ra_col=ra_col,
@@ -367,10 +385,11 @@
 def _run(self, sim_data, cols_present=False):
 if cols_present:
- # Column already present in data; assume it is correct and does not need recalculating.
+ # Column already present in data; assume it is correct and does not
+ # need recalculating.
 return sim_data
- # Need to make sure the Zenith stacker gets run first
- # Call _run method because already added these columns due to 'colsAdded' line.
+ # Need to make sure the Zenith stacker gets run first. Call the _run
+ # method because these columns were already added via 'colsAdded'.
 sim_data = self.zstacker.run(sim_data)
 sim_data = self.pastacker.run(sim_data)
 if self.degrees:
@@ -407,7 +426,8 @@
 def _run(self, sim_data, cols_present=False):
 """HA = LST - RA"""
 if cols_present:
- # Column already present in data; assume it is correct and does not need recalculating.
+ # Column already present in data; assume it is correct and does not
+ # need recalculating.
 return sim_data
 if len(sim_data) == 0:
 return sim_data
@@ -434,7 +454,8 @@
 class ParallacticAngleStacker(BaseStacker):
 """Add the parallactic angle to each visit.

- If 'degrees' is True, this will be in degrees (as are all other angles). If False, then in radians.
+ If 'degrees' is True, this will be in degrees (as are all other angles).
+ If False, then in radians.
 """

 cols_added = ["PA"]
@@ -464,9 +485,11 @@
 # or
 # http://www.gb.nrao.edu/GBT/DA/gbtidl/release2pt9/contrib/contrib/parangle.pro
 if cols_present:
- # Column already present in data; assume it is correct and does not need recalculating.
+ # Column already present in data; assume it is correct and does not
+ # need recalculating.
 return sim_data
- # Using the run method (not _run) means that if HA is present, it will not be recalculated.
+ # Using the run method (not _run) means that if HA is present, it will
+ # not be recalculated.
 sim_data = self.ha_stacker.run(sim_data)
 if self.degrees:
 dec = np.radians(sim_data[self.dec_col])
@@ -482,3 +505,76 @@
 if self.degrees:
 sim_data["PA"] = np.degrees(sim_data["PA"])
 return sim_data
+
+
+class OverheadStacker(BaseStacker):
+ """Add time between visits in seconds.
+
+ Parameters
+ ----------
+ max_gap : `float`, optional
+ The maximum gap between observations, in minutes.
+ Anything longer is assumed to mean the dome was closed.
+ Defaults to infinity.
+ mjd_col : `str`, optional
+ The name of the column with the observation start MJD.
+ Defaults to "observationStartMJD".
+ visit_time_col : `str`, optional
+ The name of the column with the total visit time.
+ Defaults to "visitTime".
+ exposure_time_col : `str`, optional
+ The name of the column with the visit exposure time.
+ Defaults to "visitExposureTime".
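+
+ Examples
+ --------
+ A minimal usage sketch; the values below are illustrative only, and
+ nothing beyond the three default columns is assumed:
+
+ >>> import numpy as np
+ >>> from rubin_sim.maf.stackers import OverheadStacker
+ >>> visits = np.zeros(
+ ... 2,
+ ... dtype=[
+ ... ("observationStartMJD", float),
+ ... ("visitTime", float),
+ ... ("visitExposureTime", float),
+ ... ],
+ ... )
+ >>> visits["observationStartMJD"] = [61000.2000, 61000.2005]
+ >>> visits["visitTime"] = [34.0, 34.0]
+ >>> visits["visitExposureTime"] = [30.0, 30.0]
+ >>> visits = OverheadStacker().run(visits)  # adds an "overhead" column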
+ """ + + cols_added = ["overhead"] + + def __init__( + self, + max_gap=np.inf, + mjd_col="observationStartMJD", + visit_time_col="visitTime", + exposure_time_col="visitExposureTime", + ): + # Set max_gap in minutes to match API of existing BruteOSFMetric + self.max_gap = max_gap + self.mjd_col = mjd_col + self.visit_time_col = visit_time_col + self.exposure_time_col = exposure_time_col + self.units = ["seconds"] + self.day_obs_mjd_stacker = DayObsMJDStacker(self.mjd_col) + + def _run(self, sim_data, cols_present=False): + if cols_present: + # Column already present in data; assume it is correct and does not + # need recalculating. + return sim_data + + # Count all non-exposure time between the end of the previous exposure + # and the end of the current exposure as overhead for the current + # exposure. + observation_end_mjd = sim_data[self.mjd_col] + sim_data[self.visit_time_col] / (24 * 60 * 60) + overhead = ( + np.diff(observation_end_mjd, prepend=np.nan) * 24 * 60 * 60 - sim_data[self.exposure_time_col] + ) + + # Rough heuristic not to count downtime due to weather or instrument + # problems as overhead. A more reliable way to do this would be to + # use other sources of weather data or problem reporting to identify + # these gaps. + # We might also explicitly want to look at the gaps that arise from + # these problems in order to measure time lost to these causes. + overhead[overhead > self.max_gap * 60] = np.nan + + # If the gap includes a change of night, it isn't really overhead. For + # most reasonable finite values of self.max_gap, this is never + # relevant, but it's somethimes useful to set max_gap to inifinity to + # catch all delays in the night. (The the comment above.) But, in + # even in this case, we would still not want to include gaps between + # nights. + day_obs_mjd = self.day_obs_mjd_stacker.run(sim_data)["day_obs_mjd"] + different_night = np.diff(day_obs_mjd, prepend=np.nan) != 0 + overhead[different_night] = np.nan + + sim_data[self.cols_added[0]] = overhead + return sim_data diff --git a/rubin_sim/maf/stackers/teff_stacker.py b/rubin_sim/maf/stackers/teff_stacker.py new file mode 100644 index 00000000..a89a2e74 --- /dev/null +++ b/rubin_sim/maf/stackers/teff_stacker.py @@ -0,0 +1,145 @@ +__all__ = ["TEFF_FIDUCIAL_DEPTH", "TEFF_FIDUCIAL_EXPTIME", "TeffStacker", "compute_teff"] + +import numpy as np + +from .base_stacker import BaseStacker + +# From reference von Karman 500nm zenith seeing of 0.69" +# median zenith dark seeing from sims_skybrightness_pre +# airmass = 1 +# 2 "snaps" of 15 seconds each +# m5_flat_sed sysEngVals from rubin_sim +# commit 6d03bd49550972e48648503ed60784a4e6775b82 (2021-05-18) +# These include constants from: +# https://github.com/lsst-pst/syseng_throughputs/blob/master/notebooks/generate_sims_values.ipynb +# commit 7abb90951fcbc70d9c4d0c805c55a67224f9069f (2021-05-05) +# See https://github.com/lsst-sims/smtn-002/blob/master/notebooks/teff_fiducial.ipynb +TEFF_FIDUCIAL_DEPTH = { + "u": 23.71, + "g": 24.67, + "r": 24.24, + "i": 23.82, + "z": 23.21, + "y": 22.40, +} + +TEFF_FIDUCIAL_EXPTIME = 30.0 + + +def compute_teff(m5_depth, filter_name, exptime=None, fiducial_depth=None, teff_base=None, normed=False): + """Compute the effective exposure time for a limiting magnitude. + + Parameters + ---------- + m5_depth : `float` or `numpy.ndarray`, (N,) + The five sigma point source limiting magintude. + filter_name : `str` or `numpy.ndarray`, (N,) + The name of the filter. 
+ exptime : `float` or `numpy.ndarray`, (N,)
+ The exposure time (seconds), defaults to TEFF_FIDUCIAL_EXPTIME.
+ fiducial_depth : `dict` [`str`, `float`]
+ A mapping of filter to fiducial depth.
+ Defaults to TEFF_FIDUCIAL_DEPTH.
+ teff_base : `float`
+ The exposure time (in seconds) corresponding to the fiducial depth.
+ normed : `bool`
+ Normalize against the exposure time, such that a value of 1 corresponds
+ to the exposure having been taken at fiducial conditions. Defaults
+ to False.
+
+ Returns
+ -------
+ t_eff : `float`
+ Effective exposure time, in seconds (if normed is False) or unitless
+ (if normed is True).
+ """
+ if fiducial_depth is None:
+ fiducial_depth = TEFF_FIDUCIAL_DEPTH
+
+ if teff_base is None:
+ teff_base = TEFF_FIDUCIAL_EXPTIME
+
+ if exptime is None:
+ exptime = TEFF_FIDUCIAL_EXPTIME
+
+ try:
+ fiducial_m5_depth = fiducial_depth[filter_name]
+ except TypeError:
+ # If filter_name is not one value, but an iterable, map it according
+ # to fiducial_depth.
+ if len(filter_name) != len(m5_depth):
+ raise ValueError("filter_name must be a scalar or match m5_depth in length")
+
+ fiducial_m5_depth = np.array([fiducial_depth[b] for b in filter_name])
+
+ t_eff = teff_base * 10.0 ** (0.8 * (m5_depth - fiducial_m5_depth))
+
+ if normed:
+ tau = t_eff / exptime
+ return tau
+ else:
+ return t_eff
+
+
+class TeffStacker(BaseStacker):
+ """Add t_eff column corresponding to fiveSigmaDepth.
+
+ Parameters
+ ----------
+ m5_col : `str`
+ Column name for the five sigma limiting depth per pointing.
+ Defaults to "fiveSigmaDepth".
+ filter_col : `str`
+ Defaults to "filter".
+ exptime_col : `str`
+ The column with the exposure time, defaults to
+ "visitExposureTime".
+ fiducial_depth : `dict` [`str`, `float`]
+ A mapping of filter to fiducial depth.
+ Defaults to TEFF_FIDUCIAL_DEPTH.
+ fiducial_exptime : `float`
+ The exposure time (in seconds) corresponding to the fiducial depth.
+ normed : `bool`
+ Normalize by the exposure time. Defaults to False.
+ """
+
+ cols_added = ["t_eff"]
+
+ def __init__(
+ self,
+ m5_col="fiveSigmaDepth",
+ filter_col="filter",
+ exptime_col="visitExposureTime",
+ fiducial_depth=None,
+ fiducial_exptime=None,
+ normed=False,
+ **kwargs,
+ ):
+ self.m5_col = m5_col
+ self.filter_col = filter_col
+ self.exptime_col = exptime_col
+ self.normed = normed
+ self.fiducial_depth = TEFF_FIDUCIAL_DEPTH if fiducial_depth is None else fiducial_depth
+ self.fiducial_exptime = TEFF_FIDUCIAL_EXPTIME if fiducial_exptime is None else fiducial_exptime
+ self.cols_req = [self.m5_col, self.filter_col]
+ if self.normed and self.exptime_col not in self.cols_req:
+ self.cols_req.append(self.exptime_col)
+
+ self.units = "" if self.normed else "seconds"
+
+ def _run(self, sim_data, cols_present=False):
+ if cols_present:
+ # Column already present in data; assume it is correct and does not
+ # need recalculating.
+ return sim_data
+
+ # The exposure time column is only used when normed is True; fall
+ # back to the fiducial exposure time if the column is absent.
+ if self.exptime_col in sim_data.dtype.names:
+ exptime = sim_data[self.exptime_col]
+ else:
+ exptime = None
+
+ sim_data["t_eff"] = compute_teff(
+ sim_data[self.m5_col],
+ sim_data[self.filter_col],
+ exptime,
+ self.fiducial_depth,
+ self.fiducial_exptime,
+ normed=self.normed,
+ )
+
+ return sim_data
diff --git a/tests/maf/test_stackers.py b/tests/maf/test_stackers.py
index 1df28393..0349de82 100644
--- a/tests/maf/test_stackers.py
+++ b/tests/maf/test_stackers.py
@@ -5,12 +5,18 @@
 import numpy as np
 from astropy import units as u
 from astropy.coordinates import SkyCoord
+from astropy.time import Time
 from rubin_scheduler.data import get_data_dir
 from rubin_scheduler.utils import Site, _alt_az_pa_from_ra_dec, calc_lmst

 import rubin_sim.maf.stackers as stackers
 from rubin_sim.maf import get_sim_data

+try:
+ import lsst.summit.utils.efdUtils
+except ModuleNotFoundError:
+ pass
+

 class TestStackerClasses(unittest.TestCase):
 def setUp(self):
@@ -445,6 +451,117 @@
 s = stackers.EclipticStacker(ra_col="ra", dec_col="dec", degrees=True, subtract_sun_lon=False)
 _ = s.run(data)

+ def test_teff_stacker(self):
+ rng = np.random.default_rng(seed=6563)
+ num_points = 5
+ data = np.zeros(
+ num_points,
+ dtype=list(zip(["fiveSigmaDepth", "filter", "visitExposureTime"], [float, (np.str_, 1), float])),
+ )
+ data["fiveSigmaDepth"] = 23 + rng.random(num_points)
+ data["filter"] = ["g"] * num_points
+ data["visitExposureTime"] = [30] * num_points
+
+ stacker = stackers.TeffStacker("fiveSigmaDepth", "filter", "visitExposureTime")
+ original_m5 = data["fiveSigmaDepth"].copy()
+ value = stacker.run(data)
+ # The stacker should add t_eff without changing the input column.
+ np.testing.assert_array_equal(value["fiveSigmaDepth"], original_m5)
+ assert np.all(0.1 < value["t_eff"]) and np.all(value["t_eff"] < 10)
+
+ def test_observation_start_datetime64_stacker(self):
+ rng = np.random.default_rng(seed=6563)
+ num_points = 5
+ data = np.zeros(
+ num_points,
+ dtype=list(zip(["observationStartMJD"], [float])),
+ )
+ data["observationStartMJD"] = 61000 + 3000 * rng.random(num_points)
+
+ stacker = stackers.ObservationStartDatetime64Stacker("observationStartMJD")
+ value = stacker.run(data)
+ recovered_mjd = Time(value["observationStartDatetime64"], format="datetime64").mjd
+ assert np.allclose(recovered_mjd, data["observationStartMJD"])
+
+ def test_day_obs_stackers(self):
+ rng = np.random.default_rng(seed=6563)
+ num_points = 5
+ obs_start_mjds = 61000 + 3000 * rng.random(num_points)
+
+ data = np.zeros(
+ num_points,
+ dtype=list(zip(["observationStartMJD"], [float])),
+ )
+ data["observationStartMJD"] = obs_start_mjds
+
+ try:
+ summit_get_day_obs = lsst.summit.utils.efdUtils.getDayObsForTime
+ except NameError:
+ summit_get_day_obs = None
+
+ day_obs_int_stacker = stackers.DayObsStacker("observationStartMJD")
+ day_obs_int = day_obs_int_stacker.run(data)
+ assert np.all(day_obs_int["dayObs"] > 20200000)
+ assert np.all(day_obs_int["dayObs"] < 20500000)
+ if summit_get_day_obs is not None:
+ summit_day_obs_int = np.array([summit_get_day_obs(Time(m, format="mjd")) for m in obs_start_mjds])
+ np.testing.assert_array_equal(day_obs_int["dayObs"], summit_day_obs_int)
+
+ day_obs_mjd_stacker = stackers.DayObsMJDStacker("observationStartMJD")
+ new_data = day_obs_mjd_stacker.run(data)
+ # If there were no offset, all values would be between 0 and 1.
+ # With the 0.5 day offset specified in SITCOMTN-32, this becomes
+ # 0.5 and 1.5.
+ assert np.all((obs_start_mjds - new_data["day_obs_mjd"]) <= 1.5) + assert np.all((obs_start_mjds - new_data["day_obs_mjd"]) >= 0.5) + + day_obs_iso_stacker = stackers.DayObsISOStacker("observationStartMJD") + _ = day_obs_iso_stacker.run(data) + + def test_overhead_stacker(self): + num_visits = 10 + num_first_night_visits = 5 + + start_mjd = 61000.2 + exptime = 30.0 + + exposure_times = np.full(num_visits, exptime, dtype=float) + + rng = np.random.default_rng(seed=6563) + overheads = 2.0 + rng.random(num_visits) + + visit_times = overheads + exposure_times + observation_end_mjds = start_mjd + np.cumsum(visit_times) / (24 * 60 * 60) + mjds = observation_end_mjds - visit_times / (24 * 60 * 60) + mjds[num_first_night_visits:] += 1 + + data = np.core.records.fromarrays( + (mjds, exposure_times, visit_times), + dtype=np.dtype( + [ + ("observationStartMJD", mjds.dtype), + ("visitExposureTime", exposure_times.dtype), + ("visitTime", visit_times.dtype), + ] + ), + ) + + overhead_stacker = stackers.OverheadStacker() + new_data = overhead_stacker.run(data) + measured_overhead = new_data["overhead"] + + # There is no visit before the first, so the first overhead should be + # nan. + self.assertTrue(np.isnan(measured_overhead[0])) + + # Test that the gap between nights is nan. + self.assertTrue(np.isnan(measured_overhead[num_first_night_visits])) + + # Make sure there are no unexpected nans + self.assertEqual(np.count_nonzero(np.isnan(measured_overhead)), 2) + + # Make sure all measured values are correct + these_gaps = ~np.isnan(measured_overhead) + self.assertTrue(np.allclose(measured_overhead[these_gaps], overheads[these_gaps])) + if __name__ == "__main__": unittest.main() diff --git a/tests/maf/test_technicalmetrics.py b/tests/maf/test_technicalmetrics.py index 66fded0f..a068abe6 100644 --- a/tests/maf/test_technicalmetrics.py +++ b/tests/maf/test_technicalmetrics.py @@ -3,6 +3,82 @@ import numpy as np import rubin_sim.maf.metrics as metrics +import rubin_sim.maf.stackers as stackers +from rubin_sim.maf.metrics.base_metric import BaseMetric + + +class OldTeffMetric(BaseMetric): + """ + Effective time equivalent for a given set of visits. 
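+
+ This is a verbatim copy of the TeffMetric removed from
+ rubin_sim.maf.metrics.technical_metrics in this change, kept here
+ (typos and all) so the regression test below can compare the old
+ implementation against the new TeffStacker-based approach.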
+ """ + + def __init__( + self, + m5_col="fiveSigmaDepth", + filter_col="filter", + metric_name="tEff", + fiducial_depth=None, + teff_base=30.0, + normed=False, + **kwargs, + ): + self.m5_col = m5_col + self.filter_col = filter_col + if fiducial_depth is None: + # From reference von Karman 500nm zenith seeing of 0.69" + # median zenith dark seeing from sims_skybrightness_pre + # airmass = 1 + # 2 "snaps" of 15 seconds each + # m5_flat_sed sysEngVals from rubin_sim + # commit 6d03bd49550972e48648503ed60784a4e6775b82 (2021-05-18) + # These include constants from: + # https://github.com/lsst-pst/syseng_throughputs/blob/master/notebooks/generate_sims_values.ipynb + # commit 7abb90951fcbc70d9c4d0c805c55a67224f9069f (2021-05-05) + # See https://github.com/lsst-sims/smtn-002/blob/master/notebooks/teff_fiducial.ipynb + self.depth = { + "u": 23.71, + "g": 24.67, + "r": 24.24, + "i": 23.82, + "z": 23.21, + "y": 22.40, + } + else: + if isinstance(fiducial_depth, dict): + self.depth = fiducial_depth + else: + raise ValueError("fiducial_depth should be None or dictionary") + self.teff_base = teff_base + self.normed = normed + if self.normed: + units = "" + else: + units = "seconds" + super(OldTeffMetric, self).__init__( + col=[m5_col, filter_col], metric_name=metric_name, units=units, **kwargs + ) + if self.normed: + self.comment = "Normalized effective time" + else: + self.comment = "Effect time" + self.comment += " of a series of observations, evaluating the equivalent amount of time" + self.comment += " each observation would require if taken at a fiducial limiting magnitude." + self.comment += " Fiducial depths are : %s" % self.depth + if self.normed: + self.comment += " Normalized by the total amount of time actual on-sky." + + def run(self, data_slice, slice_point=None): + filters = np.unique(data_slice[self.filter_col]) + teff = 0.0 + for f in filters: + match = np.where(data_slice[self.filter_col] == f)[0] + teff += (10.0 ** (0.8 * (data_slice[self.m5_col][match] - self.depth[f]))).sum() + teff *= self.teff_base + if self.normed: + # Normalize by the t_eff equivalent if each observation + # was at the fiducial depth. + teff = teff / (self.teff_base * data_slice[self.m5_col].size) + return teff class TestTechnicalMetrics(unittest.TestCase): @@ -78,23 +154,38 @@ def test_max_state_changes_within_metric(self): result = metric.run(data) # minutes self.assertEqual(result, 0) - def test_teff_metric(self): - """ - Test the Teff (time_effective) metric. 
- """ - filters = np.array(["g", "g", "g", "g", "g"]) - m5 = np.zeros(len(filters), float) + 25.0 - data = np.core.records.fromarrays([m5, filters], names=["fiveSigmaDepth", "filter"]) - metric = metrics.TeffMetric(fiducial_depth={"g": 25}, teff_base=30.0) + def test_teff_regression(self): + """Test this teff implementation matches the old one.""" + num_points = 50 + bands = tuple("ugrizy") + rng = np.random.default_rng(seed=6563) + + m5 = 24 + rng.random(num_points) + filters = rng.choice(bands, num_points) + fiducial_depth = {b: 24 + rng.random() for b in bands} + exposure_time = np.full(num_points, 30.0, dtype=float) + data = np.core.records.fromarrays( + [m5, filters, exposure_time], names=["fiveSigmaDepth", "filter", "visitExposureTime"] + ) + teff_stacker = stackers.TeffStacker(fiducial_depth=fiducial_depth, teff_base=30.0) + data = teff_stacker.run(data) + + metric = metrics.SumMetric(col="t_eff") result = metric.run(data) - self.assertEqual(result, 30.0 * m5.size) - filters = np.array(["g", "g", "g", "u", "u"]) - m5 = np.zeros(len(filters), float) + 25.0 - m5[3:5] = 20.0 - data = np.core.records.fromarrays([m5, filters], names=["fiveSigmaDepth", "filter"]) - metric = metrics.TeffMetric(fiducial_depth={"u": 20, "g": 25}, teff_base=30.0) + old_metric = OldTeffMetric(fiducial_depth=fiducial_depth, teff_base=30.0) + old_result = old_metric.run(data) + self.assertEqual(result, old_result) + + data = np.core.records.fromarrays( + [m5, filters, exposure_time], names=["fiveSigmaDepth", "filter", "visitExposureTime"] + ) + teff_stacker = stackers.TeffStacker(fiducial_depth=fiducial_depth, teff_base=30.0, normed=True) + data = teff_stacker.run(data) + metric = metrics.MeanMetric(col="t_eff") result = metric.run(data) - self.assertEqual(result, 30.0 * m5.size) + old_metric = OldTeffMetric(fiducial_depth=fiducial_depth, teff_base=30.0, normed=True) + old_result = old_metric.run(data) + self.assertAlmostEqual(result, old_result) def test_open_shutter_fraction_metric(self): """