
formatting changes
cpelley committed Oct 22, 2024
1 parent 77723c3 commit d3a1905
Showing 299 changed files with 1,354 additions and 1,951 deletions.
4 changes: 2 additions & 2 deletions doc/source/conf.py
@@ -286,7 +286,7 @@
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
- (master_doc, "Improver.tex", "IMPROVER Documentation", "Met Office", "manual"),
+ (master_doc, "Improver.tex", "IMPROVER Documentation", "Met Office", "manual")
]

# The name of an image file (relative to this directory) to place at the top of
@@ -347,7 +347,7 @@
"IMPROVER",
"One line description of project.",
"Miscellaneous",
- ),
+ )
]

# Documents to append as an appendix to all manuals.
1 change: 1 addition & 0 deletions improver/api/__init__.py
@@ -8,6 +8,7 @@
by making them available to a single flat namespace. This also protects end-users from
changes in structure to IMPROVER impacting their use of the plugins.
"""
+
from importlib import import_module

# alphabetically sorted IMPROVER plugin lookup
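The docstring above describes collecting the plugins into a single flat namespace using import_module. As a minimal sketch of that pattern, with an invented one-entry lookup table (IMPROVER's real table maps many plugins, and the plugin class name here is only assumed to exist):

```python
from importlib import import_module

# Hypothetical plugin-name -> module-path table; illustrative only.
PLUGIN_LOOKUP = {"OccurrenceBetweenThresholds": "improver.between_thresholds"}

def resolve_plugin(name: str):
    """Import the module that defines `name` and return the plugin class."""
    module = import_module(PLUGIN_LOOKUP[name])
    return getattr(module, name)
```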
10 changes: 5 additions & 5 deletions improver/between_thresholds.py
@@ -2,8 +2,7 @@
#
# This file is part of 'IMPROVER' and is released under the BSD 3-Clause license.
# See LICENSE in the root of the repository for full licensing details.
- """Plugin to calculate probabilities of occurrence between specified thresholds
- """
+ """Plugin to calculate probabilities of occurrence between specified thresholds"""

from typing import List

@@ -46,8 +45,9 @@ def __init__(
threshold_diffs = np.diff(threshold_ranges)
if any(diff < 1e-5 for diff in threshold_diffs):
raise ValueError(
- "Plugin cannot distinguish between thresholds at "
- "{} {}".format(threshold_ranges, threshold_units)
+ "Plugin cannot distinguish between thresholds at " "{} {}".format(
+ threshold_ranges, threshold_units
+ )
)
self.threshold_ranges = threshold_ranges
self.threshold_units = threshold_units
@@ -137,7 +137,7 @@ def _calculate_probabilities(self) -> Cube:
thresh_name = self.thresh_coord.name()

cubelist = iris.cube.CubeList([])
- for (lower_cube, upper_cube) in self.cube_slices:
+ for lower_cube, upper_cube in self.cube_slices:
# construct difference cube
between_thresholds_data = (lower_cube.data - upper_cube.data) * multiplier
between_thresholds_cube = upper_cube.copy(between_thresholds_data)
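For context on the difference computed in the final hunk above: the plugin derives the probability of an occurrence between two thresholds from the exceedance probabilities at each threshold. A minimal numpy sketch with invented values:

```python
import numpy as np

# Probabilities of exceeding each threshold at three grid points.
p_above_lower = np.array([0.9, 0.6, 0.3])  # P(X > lower threshold)
p_above_upper = np.array([0.4, 0.2, 0.1])  # P(X > upper threshold)

# P(lower < X <= upper) is the difference of the two exceedance curves.
p_between = p_above_lower - p_above_upper
print(p_between)  # [0.5 0.4 0.2]
```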
59 changes: 30 additions & 29 deletions improver/blending/weighted_blend.py
@@ -3,7 +3,7 @@
# This file is part of 'IMPROVER' and is released under the BSD 3-Clause license.
# See LICENSE in the root of the repository for full licensing details.
"""Module containing classes for doing weighted blending by collapsing a
- whole dimension."""
+ whole dimension."""

import warnings
from typing import List, Optional, Union
@@ -204,8 +204,9 @@ def process(
for cube in cubelist:
if "model" not in self.blend_coord and not cube.coords(self.blend_coord):
raise ValueError(
- "{} coordinate is not present on all input "
- "cubes".format(self.blend_coord)
+ "{} coordinate is not present on all input " "cubes".format(
+ self.blend_coord
+ )
)

# create model ID and model configuration coordinates if blending
@@ -221,29 +222,29 @@
class PercentileBlendingAggregator:
"""Class for the percentile blending aggregator
- This class implements the method described by Combining Probabilities
- by Caroline Jones, 2017. This method implements blending in probability
- space.
- The steps are:
- 1. At each geographic point in the cube we take the percentile
- threshold's values across the percentile dimensional coordinate.
- We recalculate, using linear interpolation, their probabilities
- in the pdf of the other points across the coordinate we are
- blending over. Thus at each point we have a set of thresholds
- and their corresponding probability values in each of the
- probability spaces across the blending coordinate.
- 2. We do a weighted blend across all the probability spaces,
- combining all the thresholds in all the points in the coordinate
- we are blending over. This gives us an array of thresholds and an
- array of blended probabilities for each of the grid points.
- 3. We convert back to the original percentile values, again using
- linear interpolation, resulting in blended values at each of the
- original percentiles.
- References:
- :download:`Combining Probabilities by Caroline Jones, 2017
- <../files/Combining_Probabilities.pdf>`
+ This class implements the method described by Combining Probabilities
+ by Caroline Jones, 2017. This method implements blending in probability
+ space.
+ The steps are:
+ 1. At each geographic point in the cube we take the percentile
+ threshold's values across the percentile dimensional coordinate.
+ We recalculate, using linear interpolation, their probabilities
+ in the pdf of the other points across the coordinate we are
+ blending over. Thus at each point we have a set of thresholds
+ and their corresponding probability values in each of the
+ probability spaces across the blending coordinate.
+ 2. We do a weighted blend across all the probability spaces,
+ combining all the thresholds in all the points in the coordinate
+ we are blending over. This gives us an array of thresholds and an
+ array of blended probabilities for each of the grid points.
+ 3. We convert back to the original percentile values, again using
+ linear interpolation, resulting in blended values at each of the
+ original percentiles.
+ References:
+ :download:`Combining Probabilities by Caroline Jones, 2017
+ <../files/Combining_Probabilities.pdf>`
"""

@staticmethod
@@ -311,7 +312,7 @@ def aggregate(
def blend_percentiles(
perc_values: ndarray, percentiles: ndarray, weights: ndarray
) -> ndarray:
- """ Blend percentiles function, to calculate the weighted blend across
+ """Blend percentiles function, to calculate the weighted blend across
a given axis of percentile data for a single grid point.
Args:
@@ -362,8 +363,8 @@ def blend_percentiles(

class WeightedBlendAcrossWholeDimension(PostProcessingPlugin):
"""Apply a Weighted blend to a cube, collapsing across the whole
- dimension. Uses one of two methods, either weighted average, or
- the maximum of the weighted probabilities."""
+ dimension. Uses one of two methods, either weighted average, or
+ the maximum of the weighted probabilities."""

def __init__(self, blend_coord: str, timeblending: bool = False) -> None:
"""Set up for a Weighted Blending plugin
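The PercentileBlendingAggregator docstring above lists the method's three steps. Below is a rough numpy sketch of those steps for a single grid point and two sources; the values and the two-source setup are invented for illustration, and the real aggregator additionally handles full arrays, masking and weight handling:

```python
import numpy as np

percentiles = np.array([10.0, 50.0, 90.0])   # fixed percentile levels
source_a = np.array([1.0, 3.0, 5.0])         # source A values at those levels
source_b = np.array([2.0, 4.0, 8.0])         # source B values at those levels
weights = np.array([0.6, 0.4])               # normalised blending weights

# Step 1: pool the threshold values and evaluate each source's
# cumulative distribution at every pooled value by linear interpolation.
thresholds = np.sort(np.concatenate([source_a, source_b]))
prob_a = np.interp(thresholds, source_a, percentiles)
prob_b = np.interp(thresholds, source_b, percentiles)

# Step 2: weighted blend of the probabilities at each threshold.
blended_prob = weights[0] * prob_a + weights[1] * prob_b

# Step 3: interpolate back to the original percentile levels.
blended_values = np.interp(percentiles, blended_prob, thresholds)
print(blended_values)  # approximately [1.0, 3.4, 8.0]
```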
21 changes: 9 additions & 12 deletions improver/blending/weights.py
@@ -25,7 +25,7 @@


class WeightsUtilities:
- """ Utilities for Weight processing. """
+ """Utilities for Weight processing."""

def __repr__(self) -> str:
"""Represent the configured plugin instance as a string."""
@@ -67,7 +67,7 @@ def normalise_weights(weights: ndarray, axis: Optional[int] = None) -> ndarray:
return normalised_weights

@staticmethod
- def build_weights_cube(cube: Cube, weights: ndarray, blending_coord: str,) -> Cube:
+ def build_weights_cube(cube: Cube, weights: ndarray, blending_coord: str) -> Cube:
"""Build a cube containing weights for use in blending.
Args:
@@ -386,12 +386,9 @@ def _calculate_weights(self, cube: Cube) -> Cube:
has been renamed using the self.weights_key_name but
otherwise matches the input cube.
"""
- (
- source_points,
- target_points,
- source_weights,
- fill_value,
- ) = self._get_interpolation_inputs_from_dict(cube)
+ (source_points, target_points, source_weights, fill_value) = (
+ self._get_interpolation_inputs_from_dict(cube)
+ )
axis = 0

weights = self._interpolate_to_find_weights(
@@ -499,7 +496,7 @@ def process(self, cubes: Union[Cube, CubeList]) -> Cube:


class ChooseDefaultWeightsLinear(BasePlugin):
- """ Calculate Default Weights using Linear Function. """
+ """Calculate Default Weights using Linear Function."""

def __init__(self, y0val: float, ynval: float) -> None:
"""
@@ -592,7 +589,7 @@ def __repr__(self):


class ChooseDefaultWeightsNonLinear(BasePlugin):
- """ Calculate Default Weights using NonLinear Function. """
+ """Calculate Default Weights using NonLinear Function."""

def __init__(self, cval: float) -> None:
"""
@@ -645,7 +642,7 @@ def nonlinear_weights(self, num_of_weights: int) -> ndarray:
return weights

def process(
- self, cube: Cube, coord_name: str, inverse_ordering: bool = False,
+ self, cube: Cube, coord_name: str, inverse_ordering: bool = False
) -> Cube:
"""
Calculate nonlinear weights for a given cube and coord.
@@ -703,7 +700,7 @@ def __repr__(self):


class ChooseDefaultWeightsTriangular(BasePlugin):
- """ Calculate Default Weights using a Triangular Function. """
+ """Calculate Default Weights using a Triangular Function."""

def __init__(self, width: float, units: Union[Unit, str] = "no_unit") -> None:
"""Set up for calculating default weights using triangular function.
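Several of the classes touched above generate default blending weights. A condensed sketch of what linear (y0val to ynval) and non-linear (powers of cval) weight generation of this kind typically looks like, normalised so the weights sum to one; treat the exact formulae as assumptions rather than IMPROVER's verbatim implementation:

```python
import numpy as np

def linear_weights(n: int, y0val: float, ynval: float) -> np.ndarray:
    """n weights evenly spaced from y0val to ynval, normalised to sum to 1."""
    weights = np.linspace(y0val, ynval, n)
    return weights / weights.sum()

def nonlinear_weights(n: int, cval: float) -> np.ndarray:
    """Geometrically decaying weights cval**0 ... cval**(n-1), normalised."""
    weights = cval ** np.arange(n)
    return weights / weights.sum()

print(linear_weights(4, 1.0, 0.25))  # [0.4 0.3 0.2 0.1]
print(nonlinear_weights(4, 0.85))
```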
2 changes: 1 addition & 1 deletion improver/calibration/__init__.py
@@ -95,7 +95,7 @@ def split_forecasts_and_truth(


def split_forecasts_and_coeffs(
- cubes: CubeList, land_sea_mask_name: Optional[str] = None,
+ cubes: CubeList, land_sea_mask_name: Optional[str] = None
):
"""Split the input forecast, coefficients, static additional predictors,
land sea-mask and probability template, if provided. The coefficients
45 changes: 24 additions & 21 deletions improver/calibration/dataframe_utilities.py
@@ -11,6 +11,7 @@
"""
+
import warnings
from typing import List, Optional, Sequence, Tuple

@@ -28,10 +29,7 @@
from improver.metadata.constants.time_types import TIME_COORDS
from improver.spotdata.build_spotdata_cube import build_spotdata_cube

- REPRESENTATION_COLUMNS = [
- "percentile",
- "realization",
- ]
+ REPRESENTATION_COLUMNS = ["percentile", "realization"]

FORECAST_DATAFRAME_COLUMNS = [
"altitude",
@@ -219,7 +217,6 @@ def _ensure_consistent_static_cols(
# Check if any of the assumed static columns are actually not static when
# the DataFrame is grouped by the site_id_col.
if (forecast_df.groupby(site_id_col)[static_cols].nunique().nunique() > 1).any():
-
for static_col in static_cols:
# For each static column, find the last value from the list of unique
# values for each site. The last value corresponds to the most recent value
@@ -236,7 +233,7 @@


def _define_time_coord(
- adate: pd.Timestamp, time_bounds: Optional[Sequence[pd.Timestamp]] = None,
+ adate: pd.Timestamp, time_bounds: Optional[Sequence[pd.Timestamp]] = None
) -> DimCoord:
"""Define a time coordinate. The coordinate will have bounds,
if bounds are provided.
@@ -254,12 +251,14 @@
return DimCoord(
np.array(adate.timestamp(), dtype=TIME_COORDS["time"].dtype),
"time",
- bounds=time_bounds
- if time_bounds is None
- else [
- np.array(t.timestamp(), dtype=TIME_COORDS["time"].dtype)
- for t in time_bounds
- ],
+ bounds=(
+ time_bounds
+ if time_bounds is None
+ else [
+ np.array(t.timestamp(), dtype=TIME_COORDS["time"].dtype)
+ for t in time_bounds
+ ]
+ ),
units=TIME_COORDS["time"].units,
)

@@ -274,7 +273,7 @@ def _define_height_coord(height) -> AuxCoord:
Returns:
The height coordinate.
"""
- return AuxCoord(np.array(height, dtype=np.float32), "height", units="m",)
+ return AuxCoord(np.array(height, dtype=np.float32), "height", units="m")


def _training_dates_for_calibration(
@@ -573,7 +572,7 @@ def _prepare_dataframes(


def forecast_dataframe_to_cube(
- df: DataFrame, training_dates: DatetimeIndex, forecast_period: int,
+ df: DataFrame, training_dates: DatetimeIndex, forecast_period: int
) -> Cube:
"""Convert a forecast DataFrame into an iris Cube. The percentiles
within the forecast DataFrame are rebadged as realizations.
@@ -629,12 +628,16 @@
fp_point.total_seconds(), dtype=TIME_COORDS["forecast_period"].dtype
),
"forecast_period",
- bounds=fp_bounds
- if fp_bounds is None
- else [
- np.array(f.total_seconds(), dtype=TIME_COORDS["forecast_period"].dtype)
- for f in fp_bounds
- ],
+ bounds=(
+ fp_bounds
+ if fp_bounds is None
+ else [
+ np.array(
+ f.total_seconds(), dtype=TIME_COORDS["forecast_period"].dtype
+ )
+ for f in fp_bounds
+ ]
+ ),
units=TIME_COORDS["forecast_period"].units,
)
frt_coord = AuxCoord(
@@ -694,7 +697,7 @@ def forecast_dataframe_to_cube(
return cube


- def truth_dataframe_to_cube(df: DataFrame, training_dates: DatetimeIndex,) -> Cube:
+ def truth_dataframe_to_cube(df: DataFrame, training_dates: DatetimeIndex) -> Cube:
"""Convert a truth DataFrame into an iris Cube.
Args:
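A small sketch of the timestamp handling visible in the hunks above: pandas Timestamps become numeric coordinate points via .timestamp(), and bounds are converted only when provided. The int64 dtype below is an assumption standing in for TIME_COORDS["time"].dtype:

```python
import numpy as np
import pandas as pd

adate = pd.Timestamp("2024-10-22 06:00", tz="UTC")
point = np.array(adate.timestamp(), dtype=np.int64)  # seconds since epoch

time_bounds = None  # bounds are optional, as in _define_time_coord
bounds = (
    time_bounds
    if time_bounds is None
    else [np.array(t.timestamp(), dtype=np.int64) for t in time_bounds]
)
print(point, bounds)  # prints the epoch seconds and None
```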
4 changes: 1 addition & 3 deletions improver/calibration/dz_rescaling.py
@@ -23,7 +23,6 @@


class EstimateDzRescaling(PostProcessingPlugin):
-
"""Estimate a rescaling of the input forecasts based on the difference in
altitude between the grid point and the site."""

@@ -143,7 +142,7 @@ def _compute_scaled_dz(self, scale_factor: float, dz: np.ndarray) -> np.ndarray:
return np.clip(scaled_dz.data, scaled_dz_lower, scaled_dz_upper)

def _compute_scaled_dz_cube(
- self, forecast: Cube, dz: Cube, scale_factor: float,
+ self, forecast: Cube, dz: Cube, scale_factor: float
) -> Cube:
"""Compute the scaled difference in altitude and ensure that the output cube
has the correct metadata.
@@ -289,7 +288,7 @@ def process(self, forecasts: Cube, truths: Cube, neighbour_cube: Cube) -> Cube:


class ApplyDzRescaling(PostProcessingPlugin):
-
"""Apply rescaling of the forecast using the difference in altitude between the
grid point and the site."""

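For the dz-rescaling hunks, the one computation visible in this excerpt is the clipping of a dz-derived scaling factor to configured limits. All values below are invented, and the final multiplicative application is an assumption about how such a factor would be used, not necessarily ApplyDzRescaling's exact behaviour:

```python
import numpy as np

scaled_dz = np.array([0.3, 0.9, 1.1, 2.7])         # invented dz-derived factors
scaled_dz = np.clip(scaled_dz, 0.5, 2.0)           # mirrors the np.clip call above
site_forecast = np.array([10.0, 12.0, 8.0, 15.0])  # invented site forecast values
print(site_forecast * scaled_dz)                   # -> [5.0, 10.8, 8.8, 30.0]
```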
