diff --git a/doc/release_notes.rst b/doc/release_notes.rst
index b05a396d8..4fc2d0f75 100644
--- a/doc/release_notes.rst
+++ b/doc/release_notes.rst
@@ -32,6 +32,8 @@ E.g. if a new rule becomes available describe how to use it `make test` and in o
 
 * Fix pre-commit docformatter python issue. `PR #1153 `__
 
+* Use BASE_DIR in rules and the `_helpers.py` script to facilitate module imports in subworkflows `PR #1137 `_
+
 PyPSA-Earth 0.4.1
 =================
diff --git a/scripts/_helpers.py b/scripts/_helpers.py
index 76bf0268d..ce97f6171 100644
--- a/scripts/_helpers.py
+++ b/scripts/_helpers.py
@@ -36,8 +36,14 @@
 # filename of the regions definition config file
 REGIONS_CONFIG = "regions_definition_config.yaml"
 
+# prefix when running pypsa-earth rules from a different directory (if running pypsa-earth as a subworkflow)
+BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))
 
-def check_config_version(config, fp_config="config.default.yaml"):
+# absolute path to config.default.yaml
+CONFIG_DEFAULT_PATH = os.path.join(BASE_DIR, "config.default.yaml")
+
+
+def check_config_version(config, fp_config=CONFIG_DEFAULT_PATH):
     """
     Check that a version of the local config.yaml matches to the actual config
     version as defined in config.default.yaml.
@@ -87,7 +93,7 @@ def handle_exception(exc_type, exc_value, exc_traceback):
 
 
 def copy_default_files():
-    fn = Path("config.yaml")
+    fn = Path(os.path.join(BASE_DIR, "config.yaml"))
     if not fn.exists():
         fn.write_text(
             "# Write down config entries differing from config.default.yaml\n\nrun: {}"
diff --git a/scripts/build_demand_profiles.py b/scripts/build_demand_profiles.py
index 51f1193c0..3686364b2 100644
--- a/scripts/build_demand_profiles.py
+++ b/scripts/build_demand_profiles.py
@@ -50,7 +50,13 @@
 import pypsa
 import scipy.sparse as sparse
 import xarray as xr
-from _helpers import configure_logging, create_logger, read_csv_nafix, read_osm_config
+from _helpers import (
+    BASE_DIR,
+    configure_logging,
+    create_logger,
+    read_csv_nafix,
+    read_osm_config,
+)
 from shapely.prepared import prep
 from shapely.validation import make_valid
 
@@ -121,7 +127,7 @@ def get_load_paths_gegis(ssp_parentfolder, config):
     for continent in region_load:
         sel_ext = ".nc"
         for ext in [".nc", ".csv"]:
-            load_path = os.path.join(str(load_dir), str(continent) + str(ext))
+            load_path = os.path.join(BASE_DIR, str(load_dir), str(continent) + str(ext))
             if os.path.exists(load_path):
                 sel_ext = ext
                 break
diff --git a/scripts/build_renewable_profiles.py b/scripts/build_renewable_profiles.py
index 1ebf220b4..2dce6032e 100644
--- a/scripts/build_renewable_profiles.py
+++ b/scripts/build_renewable_profiles.py
@@ -202,7 +202,7 @@
 import pandas as pd
 import progressbar as pgb
 import xarray as xr
-from _helpers import configure_logging, create_logger
+from _helpers import BASE_DIR, configure_logging, create_logger
 from add_electricity import load_powerplants
 from dask.distributed import Client
 from pypsa.geo import haversine
@@ -559,7 +559,7 @@ def create_scaling_factor(
     # filter plants for hydro
     if snakemake.wildcards.technology.startswith("hydro"):
         country_shapes = gpd.read_file(paths.country_shapes)
-        hydrobasins = gpd.read_file(resource["hydrobasins"])
+        hydrobasins = gpd.read_file(os.path.join(BASE_DIR, resource["hydrobasins"]))
         ppls = load_powerplants(snakemake.input.powerplants)
 
         hydro_ppls = ppls[ppls.carrier == "hydro"]
diff --git a/scripts/build_shapes.py b/scripts/build_shapes.py
index 22e6b68cf..8d3722de6 100644
--- a/scripts/build_shapes.py
+++ b/scripts/build_shapes.py
@@ -19,6 +19,7 @@
 import requests
 import xarray as xr
 from _helpers import (
+    BASE_DIR,
     configure_logging,
     create_logger,
     three_2_two_digits_country,
@@ -85,7 +86,7 @@ def download_GADM(country_code, update=False, out_logging=False):
     GADM_url = f"https://geodata.ucdavis.edu/gadm/gadm4.1/gpkg/{GADM_filename}.gpkg"
 
     GADM_inputfile_gpkg = os.path.join(
-        os.getcwd(),
+        BASE_DIR,
         "data",
         "gadm",
         GADM_filename,
@@ -489,7 +490,7 @@ def download_WorldPop_standard(
     ]
 
     WorldPop_inputfile = os.path.join(
-        os.getcwd(), "data", "WorldPop", WorldPop_filename
+        BASE_DIR, "data", "WorldPop", WorldPop_filename
     )  # Input filepath tif
 
     if not os.path.exists(WorldPop_inputfile) or update is True:
@@ -543,7 +544,7 @@ def download_WorldPop_API(
     WorldPop_filename = f"{two_2_three_digits_country(country_code).lower()}_ppp_{year}_UNadj_constrained.tif"
     # Request to get the file
     WorldPop_inputfile = os.path.join(
-        os.getcwd(), "data", "WorldPop", WorldPop_filename
+        BASE_DIR, "data", "WorldPop", WorldPop_filename
     )  # Input filepath tif
     os.makedirs(os.path.dirname(WorldPop_inputfile), exist_ok=True)
     year_api = int(str(year)[2:])
@@ -580,12 +581,10 @@ def convert_GDP(name_file_nc, year=2015, out_logging=False):
     name_file_tif = name_file_nc[:-2] + "tif"
 
     # path of the nc file
-    GDP_nc = os.path.join(os.getcwd(), "data", "GDP", name_file_nc)  # Input filepath nc
+    GDP_nc = os.path.join(BASE_DIR, "data", "GDP", name_file_nc)  # Input filepath nc
 
     # path of the tif file
-    GDP_tif = os.path.join(
-        os.getcwd(), "data", "GDP", name_file_tif
-    )  # Input filepath nc
+    GDP_tif = os.path.join(BASE_DIR, "data", "GDP", name_file_tif)  # Input filepath tif
 
     # Check if file exists, otherwise throw exception
     if not os.path.exists(GDP_nc):
@@ -628,9 +627,7 @@ def load_GDP(
 
     # path of the nc file
     name_file_tif = name_file_nc[:-2] + "tif"
-    GDP_tif = os.path.join(
-        os.getcwd(), "data", "GDP", name_file_tif
-    )  # Input filepath tif
+    GDP_tif = os.path.join(BASE_DIR, "data", "GDP", name_file_tif)  # Input filepath tif
 
     if update | (not os.path.exists(GDP_tif)):
         if out_logging:
diff --git a/scripts/download_osm_data.py b/scripts/download_osm_data.py
index c92fdc2b4..c327a7ae4 100644
--- a/scripts/download_osm_data.py
+++ b/scripts/download_osm_data.py
@@ -30,7 +30,7 @@
 import shutil
 from pathlib import Path
 
-from _helpers import configure_logging, create_logger, read_osm_config
+from _helpers import BASE_DIR, configure_logging, create_logger, read_osm_config
 from earth_osm import eo
 
 logger = create_logger(__name__)
@@ -99,8 +99,10 @@ def convert_iso_to_geofk(
     run = snakemake.config.get("run", {})
     RDIR = run["name"] + "/" if run.get("name") else ""
 
-    store_path_resources = Path.joinpath(Path().cwd(), "resources", RDIR, "osm", "raw")
-    store_path_data = Path.joinpath(Path().cwd(), "data", "osm")
+    store_path_resources = Path.joinpath(
+        Path(BASE_DIR), "resources", RDIR, "osm", "raw"
+    )
+    store_path_data = Path.joinpath(Path(BASE_DIR), "data", "osm")
     country_list = country_list_to_geofk(snakemake.params.countries)
 
     eo.save_osm_data(
diff --git a/scripts/prepare_network.py b/scripts/prepare_network.py
index 9106fc90d..47d847b78 100755
--- a/scripts/prepare_network.py
+++ b/scripts/prepare_network.py
@@ -65,7 +65,7 @@
 import pandas as pd
 import pypsa
 import requests
-from _helpers import configure_logging, create_logger
+from _helpers import BASE_DIR, configure_logging, create_logger
 from add_electricity import load_costs, update_transmission_costs
 
 idx = pd.IndexSlice
@@ -85,11 +85,14 @@ def download_emission_data():
     try:
         url = "https://jeodpp.jrc.ec.europa.eu/ftp/jrc-opendata/EDGAR/datasets/v60_GHG/CO2_excl_short-cycle_org_C/v60_GHG_CO2_excl_short-cycle_org_C_1970_2018.zip"
         with requests.get(url) as rq:
-            with open("data/co2.zip", "wb") as file:
+            with open(os.path.join(BASE_DIR, "data/co2.zip"), "wb") as file:
                 file.write(rq.content)
-        file_path = "data/co2.zip"
+        file_path = os.path.join(BASE_DIR, "data/co2.zip")
         with ZipFile(file_path, "r") as zipObj:
-            zipObj.extract("v60_CO2_excl_short-cycle_org_C_1970_2018.xls", "data")
+            zipObj.extract(
+                "v60_CO2_excl_short-cycle_org_C_1970_2018.xls",
+                os.path.join(BASE_DIR, "data"),
+            )
         os.remove(file_path)
         return "v60_CO2_excl_short-cycle_org_C_1970_2018.xls"
     except:
@@ -117,7 +120,7 @@ def emission_extractor(filename, emission_year, country_names):
     """
 
     # data reading process
-    datapath = os.path.join(os.getcwd(), "data", filename)
+    datapath = os.path.join(BASE_DIR, "data", filename)
     df = pd.read_excel(datapath, sheet_name="v6.0_EM_CO2_fossil_IPCC1996", skiprows=8)
     df.columns = df.iloc[0]
     df = df.set_index("Country_code_A3")
diff --git a/scripts/retrieve_databundle_light.py b/scripts/retrieve_databundle_light.py
index cf6e4c3b9..297599d4a 100644
--- a/scripts/retrieve_databundle_light.py
+++ b/scripts/retrieve_databundle_light.py
@@ -89,6 +89,7 @@
 import pandas as pd
 import yaml
 from _helpers import (
+    BASE_DIR,
     configure_logging,
     create_country_list,
     create_logger,
@@ -143,7 +144,7 @@ def download_and_unzip_zenodo(config, rootpath, hot_run=True, disable_progress=F
     """
     resource = config["category"]
     file_path = os.path.join(rootpath, "tempfile.zip")
-    destination = os.path.relpath(config["destination"])
+    destination = os.path.join(BASE_DIR, config["destination"])
     url = config["urls"]["zenodo"]
 
     if hot_run:
@@ -188,7 +189,7 @@ def download_and_unzip_gdrive(config, rootpath, hot_run=True, disable_progress=F
     """
     resource = config["category"]
     file_path = os.path.join(rootpath, "tempfile.zip")
-    destination = os.path.relpath(config["destination"])
+    destination = os.path.join(BASE_DIR, config["destination"])
     url = config["urls"]["gdrive"]
 
     # retrieve file_id from path
@@ -266,7 +267,7 @@ def download_and_unzip_protectedplanet(
     """
     resource = config["category"]
     file_path = os.path.join(rootpath, "tempfile_wpda.zip")
-    destination = os.path.relpath(config["destination"])
+    destination = os.path.join(BASE_DIR, config["destination"])
     url = config["urls"]["protectedplanet"]
 
     def get_first_day_of_month(date):
@@ -438,7 +439,7 @@ def download_and_unzip_direct(config, rootpath, hot_run=True, disable_progress=F
         True when download is successful, False otherwise
     """
     resource = config["category"]
-    destination = os.path.relpath(config["destination"])
+    destination = os.path.join(BASE_DIR, config["destination"])
     url = config["urls"]["direct"]
 
     file_path = os.path.join(destination, os.path.basename(url))
@@ -492,7 +493,7 @@ def download_and_unzip_hydrobasins(
         True when download is successful, False otherwise
     """
    resource = config["category"]
-    destination = os.path.relpath(config["destination"])
+    destination = os.path.join(BASE_DIR, config["destination"])
 
     url_templ = config["urls"]["hydrobasins"]["base_url"]
     suffix_list = config["urls"]["hydrobasins"]["suffixes"]
@@ -543,7 +544,7 @@ def download_and_unzip_post(config, rootpath, hot_run=True, disable_progress=Fal
         True when download is successful, False otherwise
     """
     resource = config["category"]
-    destination = os.path.relpath(config["destination"])
+    destination = os.path.join(BASE_DIR, config["destination"])
 
     # load data for post method
     postdata = config["urls"]["post"]
@@ -792,8 +793,8 @@ def datafiles_retrivedatabundle(config):
 
 
 def merge_hydrobasins_shape(config_hydrobasin, hydrobasins_level):
-    basins_path = config_hydrobasin["destination"]
-    output_fl = config_hydrobasin["output"][0]
+    basins_path = os.path.join(BASE_DIR, config_hydrobasin["destination"])
+    output_fl = os.path.join(BASE_DIR, config_hydrobasin["output"][0])
 
     files_to_merge = [
         "hybas_{0:s}_lev{1:02d}_v1c.shp".format(suffix, hydrobasins_level)
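
Note: every change above follows one pattern: paths previously resolved against the working directory (os.getcwd(), Path().cwd(), os.path.relpath) are now anchored at BASE_DIR, the pypsa-earth repository root, so data and config files are found even when the rules are executed from a parent project that uses pypsa-earth as a Snakemake subworkflow. A minimal sketch of the idea, assuming a script located in scripts/ one level below the repository root; the data_path helper and the example path are hypothetical, for illustration only:

import os

# BASE_DIR resolves the repository root from this file's location,
# independent of the current working directory.
BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))


def data_path(*parts):
    # Hypothetical helper: absolute path under the repo's data/ folder,
    # stable even when os.getcwd() is a parent project's directory.
    return os.path.join(BASE_DIR, "data", *parts)


if __name__ == "__main__":
    # Prints the same absolute path whether the script runs from the repo
    # root or from any other directory (e.g. a subworkflow invocation).
    print(data_path("osm", "raw"))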