Skip to content

Commit

Permalink
Use absolute path for config.default.yaml in _helpers.py (#1137)
Browse files Browse the repository at this point in the history
* add absolute path to config.default.yaml

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* add release notes

* download files using absolute path, not based on os.getcwd()

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* add prefix to outputs of retrieve_databundle_light if it exists

* use absolute path for osm data download

* define prefix in _helpers

* import PREFIX in retrieve_databundle_light

* add PREFIX to search for .nc or .csv file in PREFIX/data

* revert to BASE_DIR usage

* BASE_DIR to prepare_network for emission data

* store config.yaml using absolute path

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
  • Loading branch information
yerbol-akhmetov and pre-commit-ci[bot] authored Oct 29, 2024
1 parent f1b5c96 commit 3836aed
Show file tree
Hide file tree
Showing 8 changed files with 49 additions and 32 deletions.
2 changes: 2 additions & 0 deletions doc/release_notes.rst
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,8 @@ E.g. if a new rule becomes available describe how to use it `make test` and in o

* Fix pre-commit docformatter python issue. `PR #1153 <https://github.com/pypsa-meets-earth/pypsa-earth/pull/1153>`__

* Use BASE_DIR in rules and `_helpers.py` script to facilitate module import in subworkflows `PR #1137 <https://github.com/pypsa-meets-earth/pypsa-earth/pull/1137>`_

PyPSA-Earth 0.4.1
=================

Expand Down
10 changes: 8 additions & 2 deletions scripts/_helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,8 +36,14 @@
# filename of the regions definition config file
REGIONS_CONFIG = "regions_definition_config.yaml"

# prefix when running pypsa-earth rules in different directories (if running in pypsa-earth as subworkflow)
BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))

def check_config_version(config, fp_config="config.default.yaml"):
# absolute path to config.default.yaml
CONFIG_DEFAULT_PATH = os.path.join(BASE_DIR, "config.default.yaml")


def check_config_version(config, fp_config=CONFIG_DEFAULT_PATH):
"""
Check that a version of the local config.yaml matches to the actual config
version as defined in config.default.yaml.
Expand Down Expand Up @@ -87,7 +93,7 @@ def handle_exception(exc_type, exc_value, exc_traceback):


def copy_default_files():
fn = Path("config.yaml")
fn = Path(os.path.join(BASE_DIR, "config.yaml"))
if not fn.exists():
fn.write_text(
"# Write down config entries differing from config.default.yaml\n\nrun: {}"
Expand Down
10 changes: 8 additions & 2 deletions scripts/build_demand_profiles.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,13 @@
import pypsa
import scipy.sparse as sparse
import xarray as xr
from _helpers import configure_logging, create_logger, read_csv_nafix, read_osm_config
from _helpers import (
BASE_DIR,
configure_logging,
create_logger,
read_csv_nafix,
read_osm_config,
)
from shapely.prepared import prep
from shapely.validation import make_valid

Expand Down Expand Up @@ -121,7 +127,7 @@ def get_load_paths_gegis(ssp_parentfolder, config):
for continent in region_load:
sel_ext = ".nc"
for ext in [".nc", ".csv"]:
load_path = os.path.join(str(load_dir), str(continent) + str(ext))
load_path = os.path.join(BASE_DIR, str(load_dir), str(continent) + str(ext))
if os.path.exists(load_path):
sel_ext = ext
break
Expand Down
4 changes: 2 additions & 2 deletions scripts/build_renewable_profiles.py
Original file line number Diff line number Diff line change
Expand Up @@ -202,7 +202,7 @@
import pandas as pd
import progressbar as pgb
import xarray as xr
from _helpers import configure_logging, create_logger
from _helpers import BASE_DIR, configure_logging, create_logger
from add_electricity import load_powerplants
from dask.distributed import Client
from pypsa.geo import haversine
Expand Down Expand Up @@ -559,7 +559,7 @@ def create_scaling_factor(
# filter plants for hydro
if snakemake.wildcards.technology.startswith("hydro"):
country_shapes = gpd.read_file(paths.country_shapes)
hydrobasins = gpd.read_file(resource["hydrobasins"])
hydrobasins = gpd.read_file(os.path.join(BASE_DIR, resource["hydrobasins"]))
ppls = load_powerplants(snakemake.input.powerplants)

hydro_ppls = ppls[ppls.carrier == "hydro"]
Expand Down
17 changes: 7 additions & 10 deletions scripts/build_shapes.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@
import requests
import xarray as xr
from _helpers import (
BASE_DIR,
configure_logging,
create_logger,
three_2_two_digits_country,
Expand Down Expand Up @@ -85,7 +86,7 @@ def download_GADM(country_code, update=False, out_logging=False):
GADM_url = f"https://geodata.ucdavis.edu/gadm/gadm4.1/gpkg/{GADM_filename}.gpkg"

GADM_inputfile_gpkg = os.path.join(
os.getcwd(),
BASE_DIR,
"data",
"gadm",
GADM_filename,
Expand Down Expand Up @@ -489,7 +490,7 @@ def download_WorldPop_standard(
]

WorldPop_inputfile = os.path.join(
os.getcwd(), "data", "WorldPop", WorldPop_filename
BASE_DIR, "data", "WorldPop", WorldPop_filename
) # Input filepath tif

if not os.path.exists(WorldPop_inputfile) or update is True:
Expand Down Expand Up @@ -543,7 +544,7 @@ def download_WorldPop_API(
WorldPop_filename = f"{two_2_three_digits_country(country_code).lower()}_ppp_{year}_UNadj_constrained.tif"
# Request to get the file
WorldPop_inputfile = os.path.join(
os.getcwd(), "data", "WorldPop", WorldPop_filename
BASE_DIR, "data", "WorldPop", WorldPop_filename
) # Input filepath tif
os.makedirs(os.path.dirname(WorldPop_inputfile), exist_ok=True)
year_api = int(str(year)[2:])
Expand Down Expand Up @@ -580,12 +581,10 @@ def convert_GDP(name_file_nc, year=2015, out_logging=False):
name_file_tif = name_file_nc[:-2] + "tif"

# path of the nc file
GDP_nc = os.path.join(os.getcwd(), "data", "GDP", name_file_nc) # Input filepath nc
GDP_nc = os.path.join(BASE_DIR, "data", "GDP", name_file_nc) # Input filepath nc

# path of the tif file
GDP_tif = os.path.join(
os.getcwd(), "data", "GDP", name_file_tif
) # Input filepath nc
GDP_tif = os.path.join(BASE_DIR, "data", "GDP", name_file_tif) # Input filepath nc

# Check if file exists, otherwise throw exception
if not os.path.exists(GDP_nc):
Expand Down Expand Up @@ -628,9 +627,7 @@ def load_GDP(

# path of the nc file
name_file_tif = name_file_nc[:-2] + "tif"
GDP_tif = os.path.join(
os.getcwd(), "data", "GDP", name_file_tif
) # Input filepath tif
GDP_tif = os.path.join(BASE_DIR, "data", "GDP", name_file_tif) # Input filepath tif

if update | (not os.path.exists(GDP_tif)):
if out_logging:
Expand Down
8 changes: 5 additions & 3 deletions scripts/download_osm_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@
import shutil
from pathlib import Path

from _helpers import configure_logging, create_logger, read_osm_config
from _helpers import BASE_DIR, configure_logging, create_logger, read_osm_config
from earth_osm import eo

logger = create_logger(__name__)
Expand Down Expand Up @@ -99,8 +99,10 @@ def convert_iso_to_geofk(

run = snakemake.config.get("run", {})
RDIR = run["name"] + "/" if run.get("name") else ""
store_path_resources = Path.joinpath(Path().cwd(), "resources", RDIR, "osm", "raw")
store_path_data = Path.joinpath(Path().cwd(), "data", "osm")
store_path_resources = Path.joinpath(
Path(BASE_DIR), "resources", RDIR, "osm", "raw"
)
store_path_data = Path.joinpath(Path(BASE_DIR), "data", "osm")
country_list = country_list_to_geofk(snakemake.params.countries)

eo.save_osm_data(
Expand Down
13 changes: 8 additions & 5 deletions scripts/prepare_network.py
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,7 @@
import pandas as pd
import pypsa
import requests
from _helpers import configure_logging, create_logger
from _helpers import BASE_DIR, configure_logging, create_logger
from add_electricity import load_costs, update_transmission_costs

idx = pd.IndexSlice
Expand All @@ -85,11 +85,14 @@ def download_emission_data():
try:
url = "https://jeodpp.jrc.ec.europa.eu/ftp/jrc-opendata/EDGAR/datasets/v60_GHG/CO2_excl_short-cycle_org_C/v60_GHG_CO2_excl_short-cycle_org_C_1970_2018.zip"
with requests.get(url) as rq:
with open("data/co2.zip", "wb") as file:
with open(os.path.join(BASE_DIR, "data/co2.zip"), "wb") as file:
file.write(rq.content)
file_path = "data/co2.zip"
file_path = os.path.join(BASE_DIR, "data/co2.zip")
with ZipFile(file_path, "r") as zipObj:
zipObj.extract("v60_CO2_excl_short-cycle_org_C_1970_2018.xls", "data")
zipObj.extract(
"v60_CO2_excl_short-cycle_org_C_1970_2018.xls",
os.path.join(BASE_DIR, "data"),
)
os.remove(file_path)
return "v60_CO2_excl_short-cycle_org_C_1970_2018.xls"
except:
Expand Down Expand Up @@ -117,7 +120,7 @@ def emission_extractor(filename, emission_year, country_names):
"""

# data reading process
datapath = os.path.join(os.getcwd(), "data", filename)
datapath = os.path.join(BASE_DIR, "data", filename)
df = pd.read_excel(datapath, sheet_name="v6.0_EM_CO2_fossil_IPCC1996", skiprows=8)
df.columns = df.iloc[0]
df = df.set_index("Country_code_A3")
Expand Down
17 changes: 9 additions & 8 deletions scripts/retrieve_databundle_light.py
Original file line number Diff line number Diff line change
Expand Up @@ -89,6 +89,7 @@
import pandas as pd
import yaml
from _helpers import (
BASE_DIR,
configure_logging,
create_country_list,
create_logger,
Expand Down Expand Up @@ -143,7 +144,7 @@ def download_and_unzip_zenodo(config, rootpath, hot_run=True, disable_progress=F
"""
resource = config["category"]
file_path = os.path.join(rootpath, "tempfile.zip")
destination = os.path.relpath(config["destination"])
destination = os.path.join(BASE_DIR, config["destination"])
url = config["urls"]["zenodo"]

if hot_run:
Expand Down Expand Up @@ -188,7 +189,7 @@ def download_and_unzip_gdrive(config, rootpath, hot_run=True, disable_progress=F
"""
resource = config["category"]
file_path = os.path.join(rootpath, "tempfile.zip")
destination = os.path.relpath(config["destination"])
destination = os.path.join(BASE_DIR, config["destination"])
url = config["urls"]["gdrive"]

# retrieve file_id from path
Expand Down Expand Up @@ -266,7 +267,7 @@ def download_and_unzip_protectedplanet(
"""
resource = config["category"]
file_path = os.path.join(rootpath, "tempfile_wpda.zip")
destination = os.path.relpath(config["destination"])
destination = os.path.join(BASE_DIR, config["destination"])
url = config["urls"]["protectedplanet"]

def get_first_day_of_month(date):
Expand Down Expand Up @@ -438,7 +439,7 @@ def download_and_unzip_direct(config, rootpath, hot_run=True, disable_progress=F
True when download is successful, False otherwise
"""
resource = config["category"]
destination = os.path.relpath(config["destination"])
destination = os.path.join(BASE_DIR, config["destination"])
url = config["urls"]["direct"]

file_path = os.path.join(destination, os.path.basename(url))
Expand Down Expand Up @@ -492,7 +493,7 @@ def download_and_unzip_hydrobasins(
True when download is successful, False otherwise
"""
resource = config["category"]
destination = os.path.relpath(config["destination"])
destination = os.path.join(BASE_DIR, config["destination"])
url_templ = config["urls"]["hydrobasins"]["base_url"]
suffix_list = config["urls"]["hydrobasins"]["suffixes"]

Expand Down Expand Up @@ -543,7 +544,7 @@ def download_and_unzip_post(config, rootpath, hot_run=True, disable_progress=Fal
True when download is successful, False otherwise
"""
resource = config["category"]
destination = os.path.relpath(config["destination"])
destination = os.path.join(BASE_DIR, config["destination"])

# load data for post method
postdata = config["urls"]["post"]
Expand Down Expand Up @@ -792,8 +793,8 @@ def datafiles_retrivedatabundle(config):


def merge_hydrobasins_shape(config_hydrobasin, hydrobasins_level):
basins_path = config_hydrobasin["destination"]
output_fl = config_hydrobasin["output"][0]
basins_path = os.path.join(BASE_DIR, config_hydrobasin["destination"])
output_fl = os.path.join(BASE_DIR, config_hydrobasin["output"][0])

files_to_merge = [
"hybas_{0:s}_lev{1:02d}_v1c.shp".format(suffix, hydrobasins_level)
Expand Down

0 comments on commit 3836aed

Please sign in to comment.