Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Use absolute path for config.default.yaml in _helpers.py #1137

Merged
merged 18 commits into from
Oct 29, 2024
Merged
Show file tree
Hide file tree
Changes from 16 commits
Commits
Show all changes
18 commits
Select commit Hold shift + click to select a range
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions doc/release_notes.rst
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,8 @@ E.g. if a new rule becomes available describe how to use it `make test` and in o

* Enable configfile specification for mock_snakemake `PR #1135 <https://github.com/pypsa-meets-earth/pypsa-earth/pull/1135>`_

* Use submodule prefix in rules and `_helpers.py` script to facilitate smooth module import `PR #1137 <https://github.com/pypsa-meets-earth/pypsa-earth/pull/1137>`_

PyPSA-Earth 0.4.1
=================

Expand Down
10 changes: 8 additions & 2 deletions scripts/_helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,8 +36,14 @@
# filename of the regions definition config file
REGIONS_CONFIG = "regions_definition_config.yaml"

# prefix used when running pypsa-earth rules from different directories (i.e. when pypsa-earth is run as a subworkflow)
BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))

def check_config_version(config, fp_config="config.default.yaml"):
# absolute path to config.default.yaml
CONFIG_DEFAULT_PATH = os.path.join(BASE_DIR, "config.default.yaml")


def check_config_version(config, fp_config=CONFIG_DEFAULT_PATH):
"""
Check that a version of the local config.yaml matches to the actual config
version as defined in config.default.yaml.
Expand Down Expand Up @@ -87,7 +93,7 @@ def handle_exception(exc_type, exc_value, exc_traceback):


def copy_default_files():
fn = Path("config.yaml")
fn = Path(os.path.join(BASE_DIR, "config.yaml"))
if not fn.exists():
fn.write_text(
"# Write down config entries differing from config.default.yaml\n\nrun: {}"
Expand Down
4 changes: 2 additions & 2 deletions scripts/build_demand_profiles.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,7 @@
import pypsa
import scipy.sparse as sparse
import xarray as xr
from _helpers import configure_logging, create_logger, read_csv_nafix, read_osm_config
from _helpers import configure_logging, create_logger, read_csv_nafix, read_osm_config, BASE_DIR
from shapely.prepared import prep
from shapely.validation import make_valid

Expand Down Expand Up @@ -121,7 +121,7 @@ def get_load_paths_gegis(ssp_parentfolder, config):
for continent in region_load:
sel_ext = ".nc"
for ext in [".nc", ".csv"]:
load_path = os.path.join(str(load_dir), str(continent) + str(ext))
load_path = os.path.join(BASE_DIR, str(load_dir), str(continent) + str(ext))
if os.path.exists(load_path):
sel_ext = ext
break
Expand Down
4 changes: 2 additions & 2 deletions scripts/build_renewable_profiles.py
Original file line number Diff line number Diff line change
Expand Up @@ -202,7 +202,7 @@
import pandas as pd
import progressbar as pgb
import xarray as xr
from _helpers import configure_logging, create_logger
from _helpers import configure_logging, create_logger, BASE_DIR
from add_electricity import load_powerplants
from dask.distributed import Client
from pypsa.geo import haversine
Expand Down Expand Up @@ -559,7 +559,7 @@ def create_scaling_factor(
# filter plants for hydro
if snakemake.wildcards.technology.startswith("hydro"):
country_shapes = gpd.read_file(paths.country_shapes)
hydrobasins = gpd.read_file(resource["hydrobasins"])
hydrobasins = gpd.read_file(os.path.join(BASE_DIR, resource["hydrobasins"]))
ppls = load_powerplants(snakemake.input.powerplants)

hydro_ppls = ppls[ppls.carrier == "hydro"]
Expand Down
17 changes: 7 additions & 10 deletions scripts/build_shapes.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@
import requests
import xarray as xr
from _helpers import (
BASE_DIR,
configure_logging,
create_logger,
three_2_two_digits_country,
Expand Down Expand Up @@ -85,7 +86,7 @@ def download_GADM(country_code, update=False, out_logging=False):
GADM_url = f"https://geodata.ucdavis.edu/gadm/gadm4.1/gpkg/{GADM_filename}.gpkg"

GADM_inputfile_gpkg = os.path.join(
os.getcwd(),
BASE_DIR,
"data",
"gadm",
GADM_filename,
Expand Down Expand Up @@ -489,7 +490,7 @@ def download_WorldPop_standard(
]

WorldPop_inputfile = os.path.join(
os.getcwd(), "data", "WorldPop", WorldPop_filename
BASE_DIR, "data", "WorldPop", WorldPop_filename
) # Input filepath tif

if not os.path.exists(WorldPop_inputfile) or update is True:
Expand Down Expand Up @@ -543,7 +544,7 @@ def download_WorldPop_API(
WorldPop_filename = f"{two_2_three_digits_country(country_code).lower()}_ppp_{year}_UNadj_constrained.tif"
# Request to get the file
WorldPop_inputfile = os.path.join(
os.getcwd(), "data", "WorldPop", WorldPop_filename
BASE_DIR, "data", "WorldPop", WorldPop_filename
) # Input filepath tif
os.makedirs(os.path.dirname(WorldPop_inputfile), exist_ok=True)
year_api = int(str(year)[2:])
Expand Down Expand Up @@ -580,12 +581,10 @@ def convert_GDP(name_file_nc, year=2015, out_logging=False):
name_file_tif = name_file_nc[:-2] + "tif"

# path of the nc file
GDP_nc = os.path.join(os.getcwd(), "data", "GDP", name_file_nc) # Input filepath nc
GDP_nc = os.path.join(BASE_DIR, "data", "GDP", name_file_nc) # Input filepath nc

# path of the tif file
GDP_tif = os.path.join(
os.getcwd(), "data", "GDP", name_file_tif
) # Input filepath nc
GDP_tif = os.path.join(BASE_DIR, "data", "GDP", name_file_tif) # Input filepath nc

# Check if file exists, otherwise throw exception
if not os.path.exists(GDP_nc):
Expand Down Expand Up @@ -628,9 +627,7 @@ def load_GDP(

# path of the nc file
name_file_tif = name_file_nc[:-2] + "tif"
GDP_tif = os.path.join(
os.getcwd(), "data", "GDP", name_file_tif
) # Input filepath tif
GDP_tif = os.path.join(BASE_DIR, "data", "GDP", name_file_tif) # Input filepath tif

if update | (not os.path.exists(GDP_tif)):
if out_logging:
Expand Down
6 changes: 3 additions & 3 deletions scripts/download_osm_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@
import shutil
from pathlib import Path

from _helpers import configure_logging, create_logger, read_osm_config
from _helpers import configure_logging, create_logger, read_osm_config, BASE_DIR
from earth_osm import eo

logger = create_logger(__name__)
Expand Down Expand Up @@ -99,8 +99,8 @@ def convert_iso_to_geofk(

run = snakemake.config.get("run", {})
RDIR = run["name"] + "/" if run.get("name") else ""
store_path_resources = Path.joinpath(Path().cwd(), "resources", RDIR, "osm", "raw")
store_path_data = Path.joinpath(Path().cwd(), "data", "osm")
store_path_resources = Path.joinpath(Path(BASE_DIR), "resources", RDIR, "osm", "raw")
store_path_data = Path.joinpath(Path(BASE_DIR), "data", "osm")
country_list = country_list_to_geofk(snakemake.params.countries)

eo.save_osm_data(
Expand Down
10 changes: 5 additions & 5 deletions scripts/prepare_network.py
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,7 @@
import pandas as pd
import pypsa
import requests
from _helpers import configure_logging, create_logger
from _helpers import configure_logging, create_logger, BASE_DIR
from add_electricity import load_costs, update_transmission_costs

idx = pd.IndexSlice
Expand All @@ -85,11 +85,11 @@ def download_emission_data():
try:
url = "https://jeodpp.jrc.ec.europa.eu/ftp/jrc-opendata/EDGAR/datasets/v60_GHG/CO2_excl_short-cycle_org_C/v60_GHG_CO2_excl_short-cycle_org_C_1970_2018.zip"
with requests.get(url) as rq:
with open("data/co2.zip", "wb") as file:
with open(os.path.join(BASE_DIR, "data/co2.zip"), "wb") as file:
file.write(rq.content)
file_path = "data/co2.zip"
file_path = os.path.join(BASE_DIR, "data/co2.zip")
with ZipFile(file_path, "r") as zipObj:
zipObj.extract("v60_CO2_excl_short-cycle_org_C_1970_2018.xls", "data")
zipObj.extract("v60_CO2_excl_short-cycle_org_C_1970_2018.xls", os.path.join(BASE_DIR, "data"))
os.remove(file_path)
return "v60_CO2_excl_short-cycle_org_C_1970_2018.xls"
except:
Expand Down Expand Up @@ -117,7 +117,7 @@ def emission_extractor(filename, emission_year, country_names):
"""

# data reading process
datapath = os.path.join(os.getcwd(), "data", filename)
datapath = os.path.join(BASE_DIR, "data", filename)
df = pd.read_excel(datapath, sheet_name="v6.0_EM_CO2_fossil_IPCC1996", skiprows=8)
df.columns = df.iloc[0]
df = df.set_index("Country_code_A3")
Expand Down
17 changes: 9 additions & 8 deletions scripts/retrieve_databundle_light.py
Original file line number Diff line number Diff line change
Expand Up @@ -93,6 +93,7 @@
create_country_list,
create_logger,
progress_retrieve,
BASE_DIR
)
from google_drive_downloader import GoogleDriveDownloader as gdd
from tqdm import tqdm
Expand Down Expand Up @@ -143,7 +144,7 @@ def download_and_unzip_zenodo(config, rootpath, hot_run=True, disable_progress=F
"""
resource = config["category"]
file_path = os.path.join(rootpath, "tempfile.zip")
destination = os.path.relpath(config["destination"])
destination = os.path.join(BASE_DIR, config["destination"])
url = config["urls"]["zenodo"]

if hot_run:
Expand Down Expand Up @@ -188,7 +189,7 @@ def download_and_unzip_gdrive(config, rootpath, hot_run=True, disable_progress=F
"""
resource = config["category"]
file_path = os.path.join(rootpath, "tempfile.zip")
destination = os.path.relpath(config["destination"])
destination = os.path.join(BASE_DIR, config["destination"])
url = config["urls"]["gdrive"]

# retrieve file_id from path
Expand Down Expand Up @@ -266,7 +267,7 @@ def download_and_unzip_protectedplanet(
"""
resource = config["category"]
file_path = os.path.join(rootpath, "tempfile_wpda.zip")
destination = os.path.relpath(config["destination"])
destination = os.path.join(BASE_DIR, config["destination"])
url = config["urls"]["protectedplanet"]

def get_first_day_of_month(date):
Expand Down Expand Up @@ -438,7 +439,7 @@ def download_and_unzip_direct(config, rootpath, hot_run=True, disable_progress=F
True when download is successful, False otherwise
"""
resource = config["category"]
destination = os.path.relpath(config["destination"])
destination = os.path.join(BASE_DIR, config["destination"])
url = config["urls"]["direct"]

file_path = os.path.join(destination, os.path.basename(url))
Expand Down Expand Up @@ -492,7 +493,7 @@ def download_and_unzip_hydrobasins(
True when download is successful, False otherwise
"""
resource = config["category"]
destination = os.path.relpath(config["destination"])
destination = os.path.join(BASE_DIR, config["destination"])
url_templ = config["urls"]["hydrobasins"]["base_url"]
suffix_list = config["urls"]["hydrobasins"]["suffixes"]

Expand Down Expand Up @@ -543,7 +544,7 @@ def download_and_unzip_post(config, rootpath, hot_run=True, disable_progress=Fal
True when download is successful, False otherwise
"""
resource = config["category"]
destination = os.path.relpath(config["destination"])
destination = os.path.join(BASE_DIR, config["destination"])

# load data for post method
postdata = config["urls"]["post"]
Expand Down Expand Up @@ -792,8 +793,8 @@ def datafiles_retrivedatabundle(config):


def merge_hydrobasins_shape(config_hydrobasin, hydrobasins_level):
basins_path = config_hydrobasin["destination"]
output_fl = config_hydrobasin["output"][0]
basins_path = os.path.join(BASE_DIR, config_hydrobasin["destination"])
output_fl = os.path.join(BASE_DIR, config_hydrobasin["output"][0])

files_to_merge = [
"hybas_{0:s}_lev{1:02d}_v1c.shp".format(suffix, hydrobasins_level)
Expand Down
Loading