diff --git a/docs/source/_static/cdsw_1.png b/docs/source/_static/cdsw_1.png new file mode 100644 index 00000000..db2f8f6f Binary files /dev/null and b/docs/source/_static/cdsw_1.png differ diff --git a/docs/source/_static/cdsw_2.png b/docs/source/_static/cdsw_2.png new file mode 100644 index 00000000..4a09e6cd Binary files /dev/null and b/docs/source/_static/cdsw_2.png differ diff --git a/docs/source/g_cookbook.rst b/docs/source/g_cookbook.rst index ad25fef5..9416bca2 100644 --- a/docs/source/g_cookbook.rst +++ b/docs/source/g_cookbook.rst @@ -16,6 +16,7 @@ into any one category. z_turns.rst z_quantlib.rst z_curve_from_zero_rates.ipynb + z_cdsw.rst **FX Volatility Surface Building** diff --git a/docs/source/i_api.rst b/docs/source/i_api.rst index fe820a13..99f5d212 100644 --- a/docs/source/i_api.rst +++ b/docs/source/i_api.rst @@ -290,6 +290,7 @@ Cookbook z_turns.rst z_quantlib.rst z_curve_from_zero_rates.ipynb + z_cdsw.rst **FX Volatility Surface Building** diff --git a/docs/source/i_whatsnew.rst b/docs/source/i_whatsnew.rst index 80dfaa87..c7f6d8a8 100644 --- a/docs/source/i_whatsnew.rst +++ b/docs/source/i_whatsnew.rst @@ -40,6 +40,11 @@ email contact, see `rateslib `_. * - Bug - :class:`~rateslib.curves.MultiCsaCurve` is now included in the main namespace. (`436 `_) + * - Bug + - Adding *Dual* or *Dual2* type ``spread`` using :meth:`~rateslib.curves.Curve.shift` method + now avoids *TypeErrors* where possible and maintains appropriate AD orders for each + existing and new object. + (`440 `_) 1.5.0 (25th September 2024) **************************** diff --git a/docs/source/z_cdsw.rst b/docs/source/z_cdsw.rst new file mode 100644 index 00000000..0763819c --- /dev/null +++ b/docs/source/z_cdsw.rst @@ -0,0 +1,204 @@ +.. _cook-cdsw-doc: + +.. 
ipython:: python + :suppress: + + from rateslib.curves import * + from rateslib.instruments import * + from rateslib.calendars import add_tenor + from rateslib.solver import Solver + import matplotlib.pyplot as plt + from datetime import datetime as dt + import numpy as np + from pandas import DataFrame, option_context + +Replicating a Pfizer Default Curve & CDS from Bloomberg's CDSW +***************************************************************** + +.. raw:: html + +
+ + SWPM type Curve in Excel using rateslib-excel + +
+ +Some collected data at a point in time on Friday 4th Oct 2024 can be loaded into the CDSW function in Bloomberg +for the single security Pfizer US, with the intention of pricing and risking a 5Y Pfizer CDS. +The raw data necessary to build the curves and replicate the pricing risk metrics is added to Python. +(Some of the loaded CDS data is also shown in an image at the bottom of this page) + +.. ipython:: python + + irs_tenor = ["1m", "2m", "3m", "6m", "12m", "2y", "3y", "4y", "5y", "6y", "7y", "8y", "9y", "10y", "12y"] + irs_rates = irs_rates = [4.8457, 4.7002, 4.5924, 4.3019, 3.8992, 3.5032, 3.3763, 3.3295, 3.3165, 3.3195, 3.3305, 3.3450, 3.3635, 3.3830, 3.4245] + cds_tenor = ["6m", "12m", "2y", "3y", "4y", "5y", "7y", "10y"] + cds_rates = [11.011, 14.189, 20.750, 26.859, 32.862, 37.861, 51.068, 66.891] + +.. image:: _static/cdsw_1.png + :alt: SOFR discount data + :width: 300 + +Since the SOFR curve is a rates component, independent from the Pfizer credit component, we will build that, +separately, first. This is no different to any of the other tutorials or examples constructing a basic +SOFR curve - it places node dates at the maturity of each IRS and solves the curve with exact precision. + +.. ipython:: python + + today = dt(2024, 10, 4) # Friday 4th October 2024 + spot = dt(2024, 10, 8) # Tuesday 8th October 2024 + + disc_curve = Curve( + nodes={ + today: 1.0, + **{add_tenor(spot, _, "mf", "nyc"): 1.0 for _ in irs_tenor} + }, + calendar="nyc", + convention="act360", + interpolation="log_linear", + id="sofr" + ) + + us_rates_sv = Solver( + curves=[disc_curve], + instruments=[ + IRS(spot, _, spec="usd_irs", curves="sofr") for _ in irs_tenor + ], + s=irs_rates, + instrument_labels=irs_tenor, + id="us_rates" + ) + +This framework for: + +- **firstly,** structuring a *Curve* with chosen hyper-parameters such as *nodes*, *interpolation*, etc., +- **secondly,** calibrating that *Curve* with a chosen set of market *Instruments*, + +should now be familiar. 
Here we will create a hazard *Curve* for Pfizer using node dates at constant tenor points
+(notice these are **not** the maturity of the so-called *credit-imm dates* - but they could be if you wanted) and
+then calibrate the curve with proper CDS market instruments and prices.
+
+.. ipython:: python
+
+    cds_eff = dt(2024, 9, 20)
+    cds_mats = [add_tenor(dt(2024, 12, 20), _, "mf", "all") for _ in cds_tenor]
+
+    hazard_curve = Curve(
+        nodes={
+            today: 1.0,
+            **{add_tenor(spot, _, "mf", "nyc"): 1.0 for _ in cds_tenor}
+        },
+        calendar="all",
+        convention="act365f",
+        interpolation="log_linear",
+        id="pfizer"
+    )
+
+    pfizer_sv = Solver(
+        curves=[hazard_curve],
+        pre_solvers=[us_rates_sv],
+        instruments=[
+            CDS(cds_eff, _, frequency="Q", calendar="nyc", curves=["pfizer", "sofr"]) for _ in cds_mats
+        ],
+        s=cds_rates,
+        instrument_labels=cds_tenor,
+        id="pfizer_cds"
+    )
+
+Let's look at the structure of the hazard rates generated. To do this we plot the *'1d'* overnight rates of the
+*'pfizer'* hazard curve.
+
+.. ipython:: python
+
+    hazard_curve.plot("1d")
+
+.. 
plot::
+
+    from rateslib import *
+    import matplotlib.pyplot as plt
+    irs_tenor = ["1m", "2m", "3m", "6m", "12m", "2y", "3y", "4y", "5y", "6y", "7y", "8y", "9y", "10y", "12y"]
+    irs_rates = [4.8457, 4.7002, 4.5924, 4.3019, 3.8992, 3.5032, 3.3763, 3.3295, 3.3165, 3.3195, 3.3305, 3.3450, 3.3635, 3.3830, 3.4245]
+    cds_tenor = ["6m", "12m", "2y", "3y", "4y", "5y", "7y", "10y"]
+    cds_rates = [11.011, 14.189, 20.750, 26.859, 32.862, 37.861, 51.068, 66.891]
+    today = dt(2024, 10, 4)  # Friday 4th October 2024
+    spot = dt(2024, 10, 8)  # Tuesday 8th October 2024
+    disc_curve = Curve(
+        nodes={
+            today: 1.0,
+            **{add_tenor(spot, _, "mf", "nyc"): 1.0 for _ in irs_tenor}
+        },
+        calendar="nyc",
+        convention="act360",
+        interpolation="log_linear",
+        id="sofr"
+    )
+    us_rates_sv = Solver(
+        curves=[disc_curve],
+        instruments=[
+            IRS(spot, _, spec="usd_irs", curves="sofr") for _ in irs_tenor
+        ],
+        s=irs_rates,
+        instrument_labels=irs_tenor,
+        id="us_rates"
+    )
+    cds_eff = dt(2024, 9, 20)
+    cds_mats = [add_tenor(dt(2024, 12, 20), _, "mf", "all") for _ in cds_tenor]
+    hazard_curve = Curve(
+        nodes={
+            today: 1.0,
+            **{add_tenor(spot, _, "mf", "nyc"): 1.0 for _ in cds_tenor}
+        },
+        calendar="all",
+        convention="act365f",
+        interpolation="log_linear",
+        id="pfizer"
+    )
+    pfizer_sv = Solver(
+        curves=[hazard_curve],
+        pre_solvers=[us_rates_sv],
+        instruments=[
+            CDS(cds_eff, _, frequency="Q", calendar="nyc", curves=["pfizer", "sofr"]) for _ in cds_mats
+        ],
+        s=cds_rates,
+        instrument_labels=cds_tenor,
+        id="pfizer_cds"
+    )
+    fig, ax, line = hazard_curve.plot("1d", labels=["Pfizer Hazard Rate"])
+    plt.show()
+    plt.close()
+
+By definition, the probabilities of survival are calculable directly from the hazard *Curve*.
+
+.. ipython:: python
+
+    hazard_curve[dt(2025, 10, 4)]  # Probability Pfizer survives at least 1yr.
+    hazard_curve[dt(2029, 10, 4)]  # Probability Pfizer survives at least 5yr.
+    hazard_curve[dt(2034, 10, 4)]  # Probability Pfizer survives at least 10yr. 
+ +Pricing and risk metrics are calculable within *rateslib's* natural framework. Let's build the traditional +5Y Pfizer CDS and compare the numbers to Bloombergs calculator (these may not necessarily be exactly the same due to +different hyper-parameter choices for the curves and any pricing rounding, including discretization choices for the +numerical integrations of CDS protection and premium legs). + +.. ipython:: python + + cds = CDS( + effective=dt(2024, 9, 20), + termination=dt(2029, 12, 20), + frequency="q", + convention="act360", + calendar="nyc", + curves=["pfizer", "sofr"], + credit_spread=100.0, + recovery_rate=0.4, + premium_accrued=True, + notional=10e6, + ) + cds.rate(solver=pfizer_sv) # this compares to BBG: "Trd Sprd (bp)" + cds.npv(solver=pfizer_sv) # this compares to BBG: "Cash Amount" + cds.analytic_delta(hazard_curve, disc_curve) + cds.delta(solver=pfizer_sv).groupby("solver").sum() # this compares to: "Spread DV01" and "IR DV01" + +.. image:: _static/cdsw_2.png + :alt: Pfizer CDS data + :width: 725 diff --git a/python/rateslib/__init__.py b/python/rateslib/__init__.py index 55748f3d..e5aa6908 100644 --- a/python/rateslib/__init__.py +++ b/python/rateslib/__init__.py @@ -72,6 +72,7 @@ def __exit__(self, *args) -> None: from rateslib.fx import FXForwards, FXRates from rateslib.fx_volatility import FXDeltaVolSmile, FXDeltaVolSurface from rateslib.instruments import ( + CDS, FRA, IIRS, IRS, @@ -216,6 +217,7 @@ def __exit__(self, *args) -> None: "IndexFixedRateBond", "FloatRateNote", "BondFuture", + "CDS", "FRA", "CDS", "Value", diff --git a/python/rateslib/curves/curves.py b/python/rateslib/curves/curves.py index c5aae0b6..bf73d728 100644 --- a/python/rateslib/curves/curves.py +++ b/python/rateslib/curves/curves.py @@ -630,7 +630,7 @@ def csolve(self) -> None: def shift( self, - spread: float, + spread: DualTypes, id: str | NoInput = NoInput(0), composite: bool = True, collateral: str | NoInput = NoInput(0), @@ -648,6 +648,13 @@ def shift( 
---------- spread : float, Dual, Dual2 The number of basis points added to the existing curve. + + .. warning:: + + If ``composite`` is *True*, users must be aware that adding *Dual* or *Dual2* + spreads must be compatible with the AD order of *Self*, otherwise *TypeErrors* + may be raised. If in doubt, only use *float* spread values. + id : str, optional Set the id of the returned curve. composite: bool, optional @@ -750,6 +757,13 @@ def shift( return _ else: # use non-composite method, which is faster but does not preserve a dynamic spread. + # Make sure base curve ADorder matches the spread ADorder. Floats are universal + _ad = self.ad + if isinstance(spread, Dual): + self._set_ad_order(1) + elif isinstance(spread, Dual2): + self._set_ad_order(2) + v1v2 = [1.0] * (self.n - 1) n = [0] * (self.n - 1) d = 1 / 365 if self.convention.upper() != "ACT360" else 1 / 360 @@ -782,6 +796,7 @@ def shift( **kwargs, ) _.collateral = collateral + self._set_ad_order(_ad) return _ def _translate_nodes(self, start: datetime): @@ -1470,6 +1485,13 @@ def shift( ---------- spread : float, Dual, Dual2 The number of basis points added to the existing curve. + + .. warning:: + + If ``composite`` is *True*, users must be aware that adding *Dual* or *Dual2* + spreads must be compatible with the AD order of *Self*, otherwise *TypeErrors* + may be raised. If in doubt, only use *float* spread values. + id : str, optional Set the id of the returned curve. composite: bool, optional @@ -1538,6 +1560,14 @@ def shift( """ if composite: return super().shift(spread, id, composite, collateral) + + # Make sure base curve ADorder matches the spread ADorder. 
Floats are universal + _ad = self.ad + if isinstance(spread, Dual): + self._set_ad_order(1) + elif isinstance(spread, Dual2): + self._set_ad_order(2) + _ = LineCurve( nodes={k: v + spread / 100 for k, v in self.nodes.items()}, interpolation=self.interpolation, @@ -1551,6 +1581,7 @@ def shift( ad=self.ad, ) _.collateral = collateral + self._set_ad_order(_ad) return _ def _translate_nodes(self, start: datetime): diff --git a/python/rateslib/instruments/__init__.py b/python/rateslib/instruments/__init__.py index 366e3d37..7131078c 100644 --- a/python/rateslib/instruments/__init__.py +++ b/python/rateslib/instruments/__init__.py @@ -715,22 +715,31 @@ def oaspread( base, self.leg1.currency, ) - ad_ = curves[1].ad metric = "dirty_price" if dirty else "clean_price" - curves[1]._set_ad_order(1) + # Create a discounting curve with ADOrder:1 exposure to z_spread disc_curve = curves[1].shift(Dual(0, ["z_spread"], []), composite=False) - npv_price = self.rate(curves=[curves[0], disc_curve], metric=metric) + # Get forecasting curve + if type(self).__name__ in ["FloatRateNote", "IndexFixedRateBond"]: + fore_curve = curves[0].copy() + fore_curve._set_ad_order(1) + elif type(self).__name__ in ["FixedRateBond", "Bill"]: + fore_curve = None + else: + raise TypeError("Method `oaspread` can only be called on Bond type securities.") + + npv_price = self.rate(curves=[fore_curve, disc_curve], metric=metric) # find a first order approximation of z b = gradient(npv_price, ["z_spread"], 1)[0] c = float(npv_price) - float(price) z_hat = -c / b # shift the curve to the first order approximation and fine tune with 2nd order approxim. 
- curves[1]._set_ad_order(2) disc_curve = curves[1].shift(Dual2(z_hat, ["z_spread"], [], []), composite=False) - npv_price = self.rate(curves=[curves[0], disc_curve], metric=metric) + if fore_curve is not None: + fore_curve._set_ad_order(2) + npv_price = self.rate(curves=[fore_curve, disc_curve], metric=metric) a, b, c = ( 0.5 * gradient(npv_price, ["z_spread"], 2)[0][0], gradient(npv_price, ["z_spread"], 1)[0], @@ -739,15 +748,16 @@ def oaspread( z_hat2 = quadratic_eqn(a, b, c, x0=-c / b)["g"] # perform one final approximation albeit the additional price calculation slows calc time - curves[1]._set_ad_order(0) disc_curve = curves[1].shift(z_hat + z_hat2, composite=False) - npv_price = self.rate(curves=[curves[0], disc_curve], metric=metric) + disc_curve._set_ad_order(0) + if fore_curve is not None: + fore_curve._set_ad_order(0) + npv_price = self.rate(curves=[fore_curve, disc_curve], metric=metric) b = b + 2 * a * z_hat2 # forecast the new gradient c = float(npv_price) - float(price) z_hat3 = -c / b z = z_hat + z_hat2 + z_hat3 - curves[1]._set_ad_order(ad_) return z diff --git a/python/tests/test_curves.py b/python/tests/test_curves.py index 4624c736..2d10ea8e 100644 --- a/python/tests/test_curves.py +++ b/python/tests/test_curves.py @@ -821,6 +821,26 @@ def test_indexcurve_shift_dual_input() -> None: assert result_curve.index_base == curve.index_base +@pytest.mark.parametrize("c_obj", ["c", "l", "i"]) +@pytest.mark.parametrize("ini_ad", [0, 1, 2]) +@pytest.mark.parametrize("spread", [Dual(1.0, ["z"], []), Dual2(1.0, ["z"], [], [])]) +@pytest.mark.parametrize("composite", [False]) +def test_curve_shift_ad_orders(curve, line_curve, index_curve, c_obj, ini_ad, spread, composite): + if c_obj == "c": + c = curve + elif c_obj == "l": + c = line_curve + else: + c = index_curve + c._set_ad_order(ini_ad) + result = c.shift(spread, composite=composite) + + if isinstance(spread, Dual): + assert result.ad == 1 + else: + assert result.ad == 2 + + @pytest.mark.parametrize( 
("crv", "t", "tol"), [ diff --git a/python/tests/test_instruments_bonds.py b/python/tests/test_instruments_bonds.py index 18673e68..8817df24 100644 --- a/python/tests/test_instruments_bonds.py +++ b/python/tests/test_instruments_bonds.py @@ -734,8 +734,8 @@ def test_fixed_rate_bond_duration(self, metric) -> None: ex_div=7, fixed_rate=8.0, ) - price0 = gilt.price(4.445, dt(1999, 5, 27)) - price1 = gilt.price(4.446, dt(1999, 5, 27)) + price0 = gilt.price(4.445, dt(1999, 5, 27), dirty=True) + price1 = gilt.price(4.446, dt(1999, 5, 27), dirty=True) if metric == "risk": numeric = price0 - price1 elif metric == "modified": diff --git a/rust/main.rs b/rust/main.rs index 3e499b6f..e72c754c 100644 --- a/rust/main.rs +++ b/rust/main.rs @@ -25,27 +25,27 @@ fn main() { let now = SystemTime::now(); - for i in 0..10000 { + for _i in 0..10000 { let _ = ops(&a0, &b0); } println!("{:.5?} time taken for f64", now.elapsed()); - for i in 0..10000 { + for _i in 0..10000 { let _ = ops(&a3, &b3); } println!("{:.5?} time taken for Number F64 wrapper", now.elapsed()); - for i in 0..10000 { + for _i in 0..10000 { let _ = ops(&a1, &b1); } println!("{:.5?} time taken for Dual", now.elapsed()); - for i in 0..10000 { + for _i in 0..10000 { let _ = ops(&a2, &b2); } println!("{:.5?} time taken for Number Dual wrapper", now.elapsed()); - for i in 0..10000 { + for _i in 0..10000 { let _ = ops(&a2, &a3); } println!( @@ -53,7 +53,7 @@ fn main() { now.elapsed() ); - for i in 0..10000 { + for _i in 0..10000 { let _ = ops2(a0, &a1); } println!("{:.5?} time taken for F64/Dual special func", now.elapsed());