Yet another line length update to technical_metrics.py
rhiannonlynne committed Sep 29, 2023
1 parent a127a35 commit d73fd1b
Showing 1 changed file with 59 additions and 46 deletions.
105 changes: 59 additions & 46 deletions rubin_sim/maf/metrics/technical_metrics.py
@@ -14,8 +14,7 @@


class NChangesMetric(BaseMetric):
"""
Compute the number of times a column value changes.
"""Compute the number of times a column value changes.
(useful for filter changes in particular).
"""

@@ -31,10 +30,16 @@ def run(self, data_slice, slice_point=None):


class MinTimeBetweenStatesMetric(BaseMetric):
"""
Compute the minimum time between changes of state in a column value.
"""Compute the minimum time between changes of state in a column value.
(useful for calculating fastest time between filter changes in particular).
Returns delta time in minutes!
Parameters
----------
change_col : `str`
Column that we are tracking changes in.
time_col : `str`
Column with the time of each visit.
"""

def __init__(
@@ -44,10 +49,6 @@ def __init__(
metric_name=None,
**kwargs,
):
"""
change_col = column that changes state
time_col = column tracking time of each visit
"""
self.change_col = change_col
self.time_col = time_col
if metric_name is None:
@@ -57,7 +58,7 @@
)

def run(self, data_slice, slice_point=None):
# Sort on time, to be sure we've got filter (or other col) changes in the right order.
# Sort on time, to be sure we've got changes in the right order.
idxs = np.argsort(data_slice[self.time_col])
changes = data_slice[self.change_col][idxs][1:] != data_slice[self.change_col][idxs][:-1]
condition = np.where(changes == True)[0]
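The remainder of this method is collapsed in the diff; as a rough sketch of the idea (invented data, and possibly differing in detail from the hidden lines), the times at which changes occur can be diffed and the smallest gap converted to minutes:

import numpy as np

times = np.array([0.000, 0.010, 0.030, 0.031, 0.090])   # invented visit times (days)
states = np.array(["g", "r", "r", "i", "i"])             # invented filter per visit

order = np.argsort(times)
t = times[order]
changed = states[order][1:] != states[order][:-1]
change_times = t[1:][changed]                            # times at which a change is seen
if change_times.size > 1:
    min_gap_minutes = np.min(np.diff(change_times)) * 24.0 * 60.0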
@@ -79,7 +80,15 @@ class NStateChangesFasterThanMetric(BaseMetric):
"""
Compute the number of changes of state that happen faster than 'cutoff'.
(useful for calculating time between filter changes in particular).
'cutoff' should be in minutes.
Parameters
----------
change_col : `str`
Column that we are tracking changes in.
time_col : `str`
Column with the time of each visit.
cutoff : `float`
The cutoff value for the time between changes (in minutes).
"""

def __init__(
@@ -90,25 +99,21 @@ def __init__(
cutoff=20,
**kwargs,
):
"""
col = column tracking changes in
time_col = column keeping the time of each visit
cutoff = the cutoff value for the reduce method 'NBelow'
"""
if metric_name is None:
metric_name = "Number of %s changes faster than <%.1f minutes" % (
change_col,
cutoff,
)
self.change_col = change_col
self.time_col = time_col
self.cutoff = cutoff / 24.0 / 60.0 # Convert cutoff from minutes to days.
# Convert cutoff from minutes to days.
self.cutoff = cutoff / 24.0 / 60.0
super(NStateChangesFasterThanMetric, self).__init__(
col=[change_col, time_col], metric_name=metric_name, units="#", **kwargs
)

def run(self, data_slice, slice_point=None):
# Sort on time, to be sure we've got filter (or other col) changes in the right order.
# Sort on time, to be sure we've got changes in the right order.
idxs = np.argsort(data_slice[self.time_col])
changes = data_slice[self.change_col][idxs][1:] != data_slice[self.change_col][idxs][:-1]
condition = np.where(changes == True)[0]
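The rest of this method is also collapsed; a minimal sketch of counting changes that follow the previous change faster than the cutoff (invented data, and the hidden implementation may differ in detail):

import numpy as np

cutoff = 20.0 / 60.0 / 24.0                               # 20 minutes expressed in days

times = np.array([0.000, 0.001, 0.002, 0.050, 0.051])     # invented visit times (days)
states = np.array(["g", "r", "i", "i", "z"])               # invented filter per visit

order = np.argsort(times)
t = times[order]
changed = states[order][1:] != states[order][:-1]
change_times = t[1:][changed]                              # g->r, r->i, i->z
# Count successive changes separated by less than the cutoff.
n_fast = np.count_nonzero(np.diff(change_times) < cutoff)  # 1 here (the g->r to r->i pair)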
@@ -124,10 +129,18 @@ def run(self, data_slice, slice_point=None):


class MaxStateChangesWithinMetric(BaseMetric):
"""
Compute the maximum number of changes of state that occur within a given timespan.
"""Compute the maximum number of changes of state that occur
within a given timespan.
(useful for calculating time between filter changes in particular).
'timespan' should be in minutes.
Parameters
----------
change_col : `str`
Column that we are tracking changes in.
time_col : `str`
Column with the time of each visit.
timespan : `float`
The timespan to count the number of changes within (in minutes).
"""

def __init__(
@@ -138,11 +151,6 @@ def __init__(
timespan=20,
**kwargs,
):
"""
col = column tracking changes in
time_col = column keeping the time of each visit
timespan = the timespan to count the number of changes within (in minutes)
"""
if metric_name is None:
metric_name = "Max number of %s changes within %.1f minutes" % (
change_col,
@@ -156,12 +164,13 @@
)

def run(self, data_slice, slice_point=None):
# This operates slightly differently from the metrics above; those calculate only successive times
# between changes, but here we must calculate the actual times of each change.
# This operates slightly differently from the metrics above;
# those calculate only successive times between changes, but here
# we must calculate the actual times of each change.
# Check if there was only one observation (and return 0 if so).
if data_slice[self.change_col].size == 1:
return 0
# Sort on time, to be sure we've got filter (or other col) changes in the right order.
# Sort on time, to be sure we've got changes in the right order.
idxs = np.argsort(data_slice[self.time_col])
changes = data_slice[self.change_col][idxs][:-1] != data_slice[self.change_col][idxs][1:]
condition = np.where(changes == True)[0]
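The window count itself is collapsed above; one standalone way to get it, given the sorted times at which changes occur (invented values here, and not necessarily the approach in the hidden lines), is a searchsorted sweep:

import numpy as np

timespan = 20.0 / 60.0 / 24.0                             # 20 minutes in days

# Invented, already-sorted times (days) at which a state change was observed.
change_times = np.array([0.010, 0.012, 0.013, 0.050, 0.051])

# For each change, count how many changes fall within [t, t + timespan].
upper = np.searchsorted(change_times, change_times + timespan, side="right")
max_changes = int(np.max(upper - np.arange(change_times.size)))  # 3 here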
@@ -245,14 +254,15 @@ def run(self, data_slice, slice_point=None):
teff += (10.0 ** (0.8 * (data_slice[self.m5_col][match] - self.depth[f]))).sum()
teff *= self.teff_base
if self.normed:
# Normalize by the t_eff if each observation was at the fiducial depth.
# Normalize by the t_eff equivalent if each observation
# was at the fiducial depth.
teff = teff / (self.teff_base * data_slice[self.m5_col].size)
return teff
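In other words, each visit is weighted by 10^(0.8 * (m5 - fiducial depth)) and scaled by the fiducial single-visit time. A standalone sketch with invented numbers (the fiducial depth and teff_base values below are placeholders, not the defaults in this file):

import numpy as np

teff_base = 30.0                          # assumed fiducial single-visit time (seconds)
fiducial_depth = 24.4                     # assumed fiducial five-sigma depth for one filter

m5 = np.array([24.0, 24.4, 25.0])         # invented per-visit five-sigma depths
weights = 10.0 ** (0.8 * (m5 - fiducial_depth))

teff = teff_base * weights.sum()          # effective exposure time, seconds
teff_normed = weights.mean()              # 1.0 if every visit reached the fiducial depth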


class OpenShutterFractionMetric(BaseMetric):
"""
Compute the fraction of time the shutter is open compared to the total time spent observing.
"""Compute the fraction of time the shutter is open
compared to the total time spent observing.
"""

def __init__(
@@ -287,9 +297,24 @@ def run(self, data_slice, slice_point=None):

class BruteOSFMetric(BaseMetric):
"""Assume I can't trust the slewtime or visittime colums.
This computes the fraction of time the shutter is open, with no penalty for the first exposure
after a long gap (e.g., 1st exposure of the night). Presumably, the telescope will need to focus,
so there's not much a scheduler could do to optimize keeping the shutter open after a closure.
This computes the fraction of time the shutter is open,
with no penalty for the first exposure after a long gap
(e.g., 1st exposure of the night).
Presumably, the telescope will need to focus, so there's not much a
scheduler could do to optimize keeping the shutter open after a closure.
Parameters
----------
maxgap : `float`
The maximum gap between observations, in minutes.
Assume the dome has closed during anything longer.
fudge : `float`
Fudge factor if a constant has to be added to the exposure time values.
This time (in seconds) is added to the exposure time.
exp_time_col : `str`
The name of the exposure time column. Assumed to be in seconds.
mjd_col : `str`
The name of the column with the start times of the exposures. Assumed to be in units of days.
"""

def __init__(
@@ -301,18 +326,6 @@ def __init__(
fudge=0.0,
**kwargs,
):
"""
Parameters
----------
maxgap : float (10.)
The maximum gap between observations. Assume anything longer the dome has closed.
fudge : float (0.)
Fudge factor if a constant has to be added to the exposure time values (like in OpSim 3.61).
exp_time_col : str ('expTime')
The name of the exposure time column. Assumed to be in seconds.
mjd_col : str ('observationStartMJD')
The name of the start of the exposures. Assumed to be in units of days.
"""
self.exp_time_col = exp_time_col
self.maxgap = maxgap / 60.0 / 24.0 # convert from min to days
self.mjd_col = mjd_col
