Skip to content

Commit

Permalink
Merge pull request #399 from lsst/tickets/PREOPS-4949
Browse files Browse the repository at this point in the history
PREOPS-4949: Add stackers useful for schedview (Teff and dayobs related)
  • Loading branch information
ehneilsen authored Mar 29, 2024
2 parents d0f51a8 + f8e1fad commit 8f02edc
Show file tree
Hide file tree
Showing 11 changed files with 654 additions and 147 deletions.
4 changes: 2 additions & 2 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,6 @@ repos:
name: isort (python)
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.0.278
rev: v0.3.4
hooks:
- id: ruff
- id: ruff
3 changes: 1 addition & 2 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ name = "rubin-sim"
description = "Scheduler, survey strategy analysis, and other simulation tools for Rubin Observatory."
readme = "README.md"
license = { text = "GPL" }
classifiers = [
classifiers = [
"Intended Audience :: Science/Research",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3",
Expand Down Expand Up @@ -143,4 +143,3 @@ max-doc-length = 79

[tool.ruff.lint.pydocstyle]
convention = "numpy"

7 changes: 5 additions & 2 deletions rubin_sim/maf/batches/glance_batch.py
Original file line number Diff line number Diff line change
Expand Up @@ -106,9 +106,12 @@ def glanceBatch(
bundle_list.append(bundle)

# Total effective exposure time
metric = metrics.TeffMetric(m5_col=colmap["fiveSigmaDepth"], filter_col=colmap["filter"], normed=True)
metric = metrics.MeanMetric(col="t_eff")
teff_stacker = stackers.TeffStacker(normed=True)
for sql in sql_per_and_all_filters:
bundle = metric_bundles.MetricBundle(metric, slicer, sql, display_dict=displayDict)
bundle = metric_bundles.MetricBundle(
metric, slicer, sql, stacker_list=[teff_stacker], display_dict=displayDict
)
bundle_list.append(bundle)

# Number of observations, all and each filter
Expand Down
25 changes: 7 additions & 18 deletions rubin_sim/maf/batches/visitdepth_batch.py
Original file line number Diff line number Diff line change
Expand Up @@ -251,12 +251,7 @@ def tEffMetrics(
displayDict = {"group": "T_eff Summary", "subgroup": subgroup}
displayDict["caption"] = "Total effective time of the survey (see Teff metric)."
displayDict["order"] = 0
metric = metrics.TeffMetric(
m5_col=colmap["fiveSigmaDepth"],
filter_col=colmap["filter"],
normed=False,
metric_name="Total Teff",
)
metric = metrics.SumMetric(col="t_eff", metric_name="Total Teff")
slicer = slicers.UniSlicer()
bundle = mb.MetricBundle(
metric,
Expand All @@ -269,17 +264,14 @@ def tEffMetrics(

displayDict["caption"] = "Normalized total effective time of the survey (see Teff metric)."
displayDict["order"] = 1
metric = metrics.TeffMetric(
m5_col=colmap["fiveSigmaDepth"],
filter_col=colmap["filter"],
normed=True,
metric_name="Normalized Teff",
)
metric = metrics.MeanMetric(col="t_eff", metric_name="Normalized Teff")
normalized_teff_stacker = stackers.TeffStacker(normed=True)
slicer = slicers.UniSlicer()
bundle = mb.MetricBundle(
metric,
slicer,
constraint=sqls["all"],
stacker_list=[normalized_teff_stacker],
display_dict=displayDict,
info_label=info_label["all"],
)
Expand All @@ -288,12 +280,8 @@ def tEffMetrics(
# Generate Teff maps in all and per filters
displayDict = {"group": "T_eff Maps", "subgroup": subgroup}

metric = metrics.TeffMetric(
m5_col=colmap["fiveSigmaDepth"],
filter_col=colmap["filter"],
normed=True,
metric_name="Normalized Teff",
)
metric = metrics.MeanMetric(col="t_eff", metric_name="Normalized Teff")
normalized_teff_stacker = stackers.TeffStacker(normed=True)
for f in filterlist:
displayDict["caption"] = "Normalized effective time of the survey, for %s" % info_label[f]
displayDict["order"] = orders[f]
Expand All @@ -302,6 +290,7 @@ def tEffMetrics(
metric,
skyslicer,
sqls[f],
stacker_list=[normalized_teff_stacker],
info_label=info_label[f],
display_dict=displayDict,
plot_dict=plotDict,
Expand Down
75 changes: 0 additions & 75 deletions rubin_sim/maf/metrics/technical_metrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@
"MinTimeBetweenStatesMetric",
"NStateChangesFasterThanMetric",
"MaxStateChangesWithinMetric",
"TeffMetric",
"OpenShutterFractionMetric",
"BruteOSFMetric",
)
Expand Down Expand Up @@ -186,80 +185,6 @@ def run(self, data_slice, slice_point=None):
return nchanges.max()


class TeffMetric(BaseMetric):
    """Effective exposure time equivalent for a given set of visits.

    Each visit is weighted by ``10 ** (0.8 * (m5 - fiducial_depth))`` in its
    filter — the time a visit at the fiducial depth would need to reach the
    visit's actual five-sigma depth — scaled by ``teff_base`` and summed.

    Parameters
    ----------
    m5_col : `str`
        Column containing the five-sigma limiting depth of each visit.
    filter_col : `str`
        Column containing the filter of each visit.
    metric_name : `str`
        Name to assign to the metric.
    fiducial_depth : `dict` [`str`, `float`] or None
        Fiducial five-sigma depth per filter. None uses the SMTN-002
        reference values defined below.
    teff_base : `float`
        Nominal exposure time per visit, in seconds.
    normed : `bool`
        If True, divide the summed t_eff by the total fiducial time
        (``teff_base`` times the number of visits), yielding a unitless
        ratio instead of seconds.
    """

    def __init__(
        self,
        m5_col="fiveSigmaDepth",
        filter_col="filter",
        metric_name="tEff",
        fiducial_depth=None,
        teff_base=30.0,
        normed=False,
        **kwargs,
    ):
        self.m5_col = m5_col
        self.filter_col = filter_col
        if fiducial_depth is None:
            # From reference von Karman 500nm zenith seeing of 0.69"
            # median zenith dark seeing from sims_skybrightness_pre
            # airmass = 1
            # 2 "snaps" of 15 seconds each
            # m5_flat_sed sysEngVals from rubin_sim
            # commit 6d03bd49550972e48648503ed60784a4e6775b82 (2021-05-18)
            # These include constants from:
            # https://github.com/lsst-pst/syseng_throughputs/blob/master/notebooks/generate_sims_values.ipynb
            # commit 7abb90951fcbc70d9c4d0c805c55a67224f9069f (2021-05-05)
            # See https://github.com/lsst-sims/smtn-002/blob/master/notebooks/teff_fiducial.ipynb
            self.depth = {
                "u": 23.71,
                "g": 24.67,
                "r": 24.24,
                "i": 23.82,
                "z": 23.21,
                "y": 22.40,
            }
        elif isinstance(fiducial_depth, dict):
            self.depth = fiducial_depth
        else:
            raise ValueError("fiducial_depth should be None or dictionary")
        self.teff_base = teff_base
        self.normed = normed
        # The normalized version is a dimensionless ratio; otherwise seconds.
        units = "" if self.normed else "seconds"
        super().__init__(col=[m5_col, filter_col], metric_name=metric_name, units=units, **kwargs)
        if self.normed:
            self.comment = "Normalized effective time"
        else:
            # Fixed typo: previously read "Effect time".
            self.comment = "Effective time"
        self.comment += " of a series of observations, evaluating the equivalent amount of time"
        self.comment += " each observation would require if taken at a fiducial limiting magnitude."
        self.comment += " Fiducial depths are : %s" % self.depth
        if self.normed:
            self.comment += " Normalized by the total amount of time actual on-sky."

    def run(self, data_slice, slice_point=None):
        """Return the summed (or normalized) effective time for this slice.

        Raises
        ------
        KeyError
            If a filter in ``data_slice`` has no entry in the fiducial
            depth dictionary.
        """
        filters = np.unique(data_slice[self.filter_col])
        teff = 0.0
        for f in filters:
            match = np.where(data_slice[self.filter_col] == f)[0]
            teff += (10.0 ** (0.8 * (data_slice[self.m5_col][match] - self.depth[f]))).sum()
        teff *= self.teff_base
        if self.normed:
            # Normalize by the t_eff equivalent if each observation
            # was at the fiducial depth.
            teff = teff / (self.teff_base * data_slice[self.m5_col].size)
        return teff


class OpenShutterFractionMetric(BaseMetric):
"""Compute the fraction of time the shutter is open
compared to the total time spent observing.
Expand Down
2 changes: 2 additions & 0 deletions rubin_sim/maf/stackers/__init__.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
from .base_stacker import *
from .coord_stackers import *
from .date_stackers import *
from .general_stackers import *
from .get_col_info import *
from .label_stackers import *
Expand All @@ -9,3 +10,4 @@
from .neo_dist_stacker import *
from .sdss_stackers import *
from .sn_stacker import *
from .teff_stacker import *
140 changes: 140 additions & 0 deletions rubin_sim/maf/stackers/date_stackers.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,140 @@
__all__ = (
"ObservationStartDatetime64Stacker",
"DayObsStacker",
"DayObsMJDStacker",
"DayObsISOStacker",
)

import numpy as np
from astropy.time import Time

from .base_stacker import BaseStacker


class ObservationStartDatetime64Stacker(BaseStacker):
    """Add the observation start time as a ``numpy.datetime64`` column."""

    cols_added = ["observationStartDatetime64"]

    def __init__(self, mjd_col="observationStartMJD"):
        self.mjd_col = mjd_col
        # A datetime64 column carries no separate unit label.
        self.units = [None]
        self.cols_added_dtypes = ["datetime64[ns]"]

    def _run(self, sim_data, cols_present=False):
        # If the column already exists, trust it rather than recomputing.
        if cols_present:
            return sim_data

        start_times = Time(sim_data[self.mjd_col], format="mjd")
        sim_data[self.cols_added[0]] = start_times.datetime64
        return sim_data


def _compute_day_obs_mjd(mjd):
day_obs_mjd = np.floor(mjd - 0.5).astype("int")
return day_obs_mjd


def _compute_day_obs_astropy_time(mjd):
    """Observing-day rollover point as an `astropy.time.Time`."""
    return Time(_compute_day_obs_mjd(mjd), format="mjd")


def _compute_day_obs_iso8601(mjd):
    """ISO 8601 date string(s), "YYYY-MM-DD", for the observing day of mjd."""
    full_iso = _compute_day_obs_astropy_time(mjd).iso

    # astropy returns a plain str for scalar input and an array otherwise;
    # keep only the date portion (first 10 characters) in either case.
    if isinstance(full_iso, str):
        return full_iso[:10]

    return np.array([stamp[:10] for stamp in full_iso])


def _compute_day_obs_int(mjd):
    """Return the observing day encoded as integer(s) like 20240329.

    Parameters
    ----------
    mjd : `float` or `numpy.ndarray`
        Observation start time(s), MJD.

    Returns
    -------
    day_obs_int : `int` or `numpy.ndarray`
        Observing day in YYYYMMDD integer form.
    """
    day_obs_iso = _compute_day_obs_iso8601(mjd)

    # _compute_day_obs_iso8601 returns a bare str for scalar input; iterating
    # it directly would loop over individual characters, so handle the
    # scalar case explicitly.
    if isinstance(day_obs_iso, str):
        return int(day_obs_iso.replace("-", ""))

    # Convert to actual integers: DayObsStacker declares this column's
    # dtype as int, so returning digit strings would be inconsistent.
    day_obs_int = np.array([int(d.replace("-", "")) for d in day_obs_iso])

    return day_obs_int


class DayObsStacker(BaseStacker):
    """Add dayObs, as defined by SITCOMTN-32, in YYYYMMDD integer form.

    Parameters
    ----------
    mjd_col : `str`
        The column with the observation start MJD.
    """

    cols_added = ["dayObs"]

    def __init__(self, mjd_col="observationStartMJD"):
        self.mjd_col = mjd_col
        self.units = ["days"]
        self.cols_added_dtypes = [int]

    def _run(self, sim_data, cols_present=False):
        if not cols_present:
            # Column absent; derive it from the visit start times.
            sim_data[self.cols_added[0]] = _compute_day_obs_int(sim_data[self.mjd_col])
        return sim_data


class DayObsMJDStacker(BaseStacker):
    """Add dayObs, as defined by SITCOMTN-32, as an integer MJD.

    Parameters
    ----------
    mjd_col : `str`
        The column with the observation start MJD.
    """

    cols_added = ["day_obs_mjd"]

    def __init__(self, mjd_col="observationStartMJD"):
        self.mjd_col = mjd_col
        self.units = ["days"]
        self.cols_added_dtypes = [int]

    def _run(self, sim_data, cols_present=False):
        if not cols_present:
            # Column absent; derive it from the visit start times.
            sim_data[self.cols_added[0]] = _compute_day_obs_mjd(sim_data[self.mjd_col])
        return sim_data


class DayObsISOStacker(BaseStacker):
    """Add dayObs, as defined by SITCOMTN-32, in ISO 8601 format.

    Parameters
    ----------
    mjd_col : `str`
        The column with the observation start MJD.
    """

    cols_added = ["day_obs_iso8601"]

    def __init__(self, mjd_col="observationStartMJD"):
        self.mjd_col = mjd_col
        self.units = [None]
        # Ten characters holds a full "YYYY-MM-DD" date.
        self.cols_added_dtypes = [(str, 10)]

    def _run(self, sim_data, cols_present=False):
        if not cols_present:
            # Column absent; derive it from the visit start times.
            sim_data[self.cols_added[0]] = _compute_day_obs_iso8601(sim_data[self.mjd_col])
        return sim_data
Loading

0 comments on commit 8f02edc

Please sign in to comment.