diff --git a/backend/bloom/tasks/create_new_excursion.py b/backend/bloom/tasks/create_new_excursion.py
deleted file mode 100644
index 2c637159..00000000
--- a/backend/bloom/tasks/create_new_excursion.py
+++ /dev/null
@@ -1,108 +0,0 @@
-from bloom.domain.excursion import Excursion
-from bloom.container import UseCases
-from sqlalchemy.orm import Session
-from shapely.geometry import Point
-from datetime import datetime,timedelta
-from typing import Optional
-import pandas as pd
-from geoalchemy2.shape import to_shape
-
-def add_excursion(vessel_id: int, departure_at: datetime, departure_position: Optional[Point] = None) -> int:
-    use_cases = UseCases()
-    db = use_cases.db()
-    excursion_repository = use_cases.excursion_repository()
-
-    with db.session() as session:
-        result = excursion_repository.get_param_from_last_excursion(session, vessel_id)
-
-        if result:
-            arrival_port_id = result["arrival_port_id"]
-            arrival_position = to_shape(result["arrival_position"])
-        else:
-            arrival_port_id = None
-            arrival_position = None
-
-        new_excursion = Excursion(
-            vessel_id=vessel_id,
-            departure_port_id=arrival_port_id if departure_position is None else None,
-            departure_at=departure_at,
-            departure_position=arrival_position if departure_position is None else departure_position,
-            arrival_port_id=None,
-            arrival_at=None,
-            arrival_position=None,
-            excursion_duration=None,
-            total_time_at_sea=None,
-            total_time_in_amp=None,
-            total_time_in_territorial_waters=None,
-            total_time_in_costal_waters=None,
-            total_time_fishing=None,
-            total_time_fishing_in_amp=None,
-            total_time_fishing_in_territorial_waters=None,
-            total_time_fishing_in_costal_waters=None,
-            total_time_extincting_amp=None
-        )
-
-        new_excursion_sql = excursion_repository.map_to_sql(new_excursion)
-        session.add(new_excursion_sql)
-        session.commit()
-        session.refresh(new_excursion_sql)
-        return new_excursion_sql.id
-
-def close_excursion(id: int, port_id: int, latitude: float, longitude: float, arrived_at: datetime) -> None:
-
-    use_cases = UseCases()
-    db = use_cases.db()
-    excursion_repository = use_cases.excursion_repository()
-
-    with db.session() as session:
-        excursion = excursion_repository.get_excursion_by_id(session, id)
-
-        if excursion:
-            excursion.arrival_port_id = port_id
-            excursion.arrival_at = arrived_at
-            excursion.arrival_position = Point(longitude,latitude)
-
-            close_excursion_sql = excursion_repository.map_to_sql(excursion)
-            session.merge(close_excursion_sql)  # Utiliser merge pour mettre à jour l'entité dans la session
-            session.commit()
-        else:
-            raise ValueError(f"No excursion found with ID {id}")
-
-def update_excursion(id :int) -> None :
-
-    use_cases = UseCases()
-    db = use_cases.db()
-    excursion_repository = use_cases.excursion_repository()
-    segment_repository = use_cases.segment_repository()
-
-    with db.session() as session:
-
-        total_segments = segment_repository.get_segments_by_excursions(session, id)
-
-        total_segments['segment_duration'] = pd.to_timedelta(total_segments['segment_duration'])
-        excursion_duration=total_segments['segment_duration'].sum()
-
-        in_amp=total_segments[total_segments.loc[:, 'in_amp_zone'] == 1]
-        amp_duration=in_amp['segment_duration'].sum()
-
-        in_territorial_waters=total_segments[total_segments.loc[:, 'in_territorial_waters'] == 1]
-        territorial_duration=in_territorial_waters['segment_duration'].sum()
-
-        in_costal_waters=total_segments[total_segments.loc[:, 'in_costal_waters'] == 1]
-        costal_duration=in_costal_waters['segment_duration'].sum()
-
-        excursion = excursion_repository.get_excursion_by_id(session, id)
-
-        if excursion:
-            excursion.excursion_duration = excursion_duration
-            excursion.total_time_in_amp = amp_duration
-            excursion.total_time_in_territorial_waters = territorial_duration
-            excursion.total_time_in_costal_waters = costal_duration
-            excursion.total_time_at_sea = excursion_duration - territorial_duration - costal_duration
-
-            excursion_update_sql = excursion_repository.map_to_sql(excursion)
-            session.merge(excursion_update_sql)  # Utiliser merge pour mettre à jour l'entité dans la session
-            session.commit()
-            session.close()
-        else:
-            raise ValueError(f"No excursion found with ID {id}")
\ No newline at end of file
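Note on the deleted module: `update_excursion` above rolled segment durations up to the excursion with pandas timedelta aggregation. A minimal, self-contained sketch of that aggregation pattern is shown below for reference; the DataFrame is fabricated and stands in for `segment_repository.get_segments_by_excursions()`.

```python
# Illustrative sketch only: mirrors the aggregation the deleted update_excursion()
# performed, with a hand-built DataFrame instead of the segment repository.
import pandas as pd

segments = pd.DataFrame({
    "segment_duration": ["0 days 01:00:00", "0 days 02:30:00", "0 days 00:45:00"],
    "in_amp_zone": [1, 0, 1],
    "in_territorial_waters": [0, 1, 0],
    "in_costal_waters": [0, 0, 1],
})

# Durations are summed as timedeltas, then split by zone flags.
segments["segment_duration"] = pd.to_timedelta(segments["segment_duration"])
excursion_duration = segments["segment_duration"].sum()
amp_duration = segments.loc[segments["in_amp_zone"] == 1, "segment_duration"].sum()
territorial_duration = segments.loc[segments["in_territorial_waters"] == 1, "segment_duration"].sum()
costal_duration = segments.loc[segments["in_costal_waters"] == 1, "segment_duration"].sum()

# total_time_at_sea was derived from the other aggregates.
total_time_at_sea = excursion_duration - territorial_duration - costal_duration
print(excursion_duration, amp_duration, territorial_duration, costal_duration, total_time_at_sea)
```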
diff --git a/backend/bloom/tasks/create_update_excursions_segments.py b/backend/bloom/tasks/create_update_excursions_segments.py
index 0c63b8ed..22992ee0 100644
--- a/backend/bloom/tasks/create_update_excursions_segments.py
+++ b/backend/bloom/tasks/create_update_excursions_segments.py
@@ -44,7 +44,7 @@ def add_excursion(session: Session, vessel_id: int, departure_at: datetime,
 
     if result:
         arrival_port_id = result["arrival_port_id"]
-        arrival_position = to_shape(result["arrival_position"])
+        arrival_position = to_shape(result["arrival_position"]) if result["arrival_position"] else None
     else:
         arrival_port_id = None
         arrival_position = None
@@ -226,7 +226,7 @@ def get_port(x, session):
     for a in df.index:
         if (df["port"].iloc[a] >= 0):
             if (open_ongoing_excursion):
-                close_excursion(session, ongoing_excursion_id, df["port"].iloc[a],
+                close_excursion(session, ongoing_excursion_id, int(df["port"].iloc[a]),
                                 df["end_latitude"].iloc[a], df["end_longitude"].iloc[a],
                                 df["timestamp_end"].iloc[a])  # put the close excursion function here
 
@@ -306,9 +306,9 @@ def get_time_of_departure():
                 new_rels.append(RelSegmentZone(segment_id=segment.id, zone_id=zone.id))
                 if zone.category == "amp":
                     segment.in_amp_zone = True
-                elif zone.category == "coastal":
+                elif zone.category.startswith("Fishing coastal waters"):
                     segment.in_costal_waters = True
-                elif zone.category == "territorial":
+                elif zone.category == "Territorial seas":
                     segment.in_territorial_waters = True
             # Mise à jour de l'excursion avec le temps passé dans chaque type de zone
             excursion = excursions.get(segment.excursion_id,
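Two of the changes above are easy to miss: `to_shape()` is now guarded because the previous excursion may have no stored arrival position, and the port id is wrapped in `int()` because a scalar pulled from an integer pandas column is a `numpy.int64`, which some drivers (psycopg2, for instance) may refuse as a bind parameter. A standalone sketch of the second point, independent of the project's `close_excursion`:

```python
# Standalone illustration of why int(df["port"].iloc[a]) is needed:
# pandas hands back a numpy scalar rather than a plain Python int.
import numpy as np
import pandas as pd

df = pd.DataFrame({"port": [42]})

value = df["port"].iloc[0]
print(type(value))                    # <class 'numpy.int64'>
print(isinstance(value, np.integer))  # True

port_id = int(value)                  # plain Python int, safe to bind as a query parameter
print(type(port_id))                  # <class 'int'>
```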
diff --git a/backend/bloom/tasks/load_dim_zone_amp_from_csv.py b/backend/bloom/tasks/load_dim_zone_amp_from_csv.py
index fe3365b9..99e3ae1a 100644
--- a/backend/bloom/tasks/load_dim_zone_amp_from_csv.py
+++ b/backend/bloom/tasks/load_dim_zone_amp_from_csv.py
@@ -2,66 +2,62 @@
 from time import perf_counter
 
 import pandas as pd
+from shapely import wkb
+
 from bloom.config import settings
 from bloom.container import UseCases
 from bloom.domain.zone import Zone
-from bloom.infra.database.errors import DBException
 from bloom.logger import logger
-from pydantic import ValidationError
-from shapely import wkb
+
+FIC_ZONE = ["french_metropolitan_mpas.csv", "fishing_coastal_waters.csv", "territorial_seas.csv"]
 
 
 def map_to_domain(row: pd.Series) -> Zone:
     isna = row.isna()
+    json_data = {}
+    for k in ["index", "wdpaid", "desig_eng", "desig_type", "iucn_cat", "parent_iso", "iso3", "benificiaries",
+              "source", "reference"]:
+        try:
+            value = row[k] if not isna[k] else None
+            json_data[k] = value
+        except:
+            pass
+
     return Zone(
-        category="amp",
-        sub_category=None,
+        category=row["category"],
+        sub_category=row["sub_category"] if not isna["sub_category"] else None,
         name=row["name"],
         geometry=row["geometry"],
         centroid=row["geometry"].centroid,
-        json_data={k: row[k] if not isna[k] else None for k in
-                   ["index", "desig_eng", "desig_type", "iucn_cat", "parent_iso", "iso3", "benificiaries"]},
+        json_data=json_data,
    )
 
 
-def run(csv_file_name: str):
+def run():
     use_cases = UseCases()
     db = use_cases.db()
     zone_repository = use_cases.zone_repository()
-    total = 0
-    try:
-        df = pd.read_csv(csv_file_name, sep=",")
-        df = df.rename(columns={"Geometry": "geometry",
-                                "Index": "index", "WDPAID": "wdpaid",
-                                "Name": "name",
-                                "DESIG_ENG": "desig_eng",
-                                "DESIG_TYPE": "desig_type",
-                                "IUCN_CAT": "iucn_cat",
-                                "PARENT_ISO": "parent_iso",
-                                "ISO3": "iso3",
-                                "Benificiaries": "benificiaries"})
-        df["geometry"] = df["geometry"].apply(wkb.loads)
-        zones = df.apply(map_to_domain, axis=1)
-        with db.session() as session:
+    with db.session() as session:
+        for fic_csv in FIC_ZONE:
+            file_name = Path(settings.data_folder).joinpath(fic_csv)
+            logger.info(f"Chargement des données du fichier {file_name}")
+
+            total = 0
+            df = pd.read_csv(file_name, sep=",")
+            df["geometry"] = df["geometry"].apply(wkb.loads)
+            zones = df.apply(map_to_domain, axis=1)
             zones = zone_repository.batch_create_zone(session, list(zones))
-            session.commit()
             total = len(zones)
-            print(zones)
-    except ValidationError as e:
-        logger.error("Erreur de validation des données de bateau")
-        logger.error(e.errors())
-    except DBException:
-        logger.error("Erreur d'insertion en base")
-    logger.info(f"{total} zone(s) créés")
+            logger.info(f"{total} zone(s) créés")
+        session.commit()
 
 
 if __name__ == "__main__":
     time_start = perf_counter()
-    file_name = Path(settings.data_folder).joinpath("./zones_subset.csv")
-    logger.info(f"DEBUT - Chargement des données des zones AMP depuis le fichier {file_name}")
-    run(file_name)
+    logger.info("DEBUT - Chargement des données des zones")
+    run()
     time_end = perf_counter()
     duration = time_end - time_start
-    logger.info(f"FIN - Chargement des données des zones AMP en {duration:.2f}s")
+    logger.info(f"FIN - Chargement des données des zones en {duration:.2f}s")
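For context on the reworked loader: each CSV listed in `FIC_ZONE` is expected to carry a hex-encoded WKB `geometry` column plus the `category`, `sub_category` and `name` columns read by `map_to_domain`, and the loaded `category` labels are what the segment-flagging checks in `create_update_excursions_segments.py` now match against. A minimal sketch of a single row, with a fabricated square polygon and shapely 2.x assumed (so `wkb.loads` accepts hex strings):

```python
# Hypothetical one-row example of what the refactored loader consumes.
# Only the column names come from map_to_domain() above; the data is made up.
import pandas as pd
from shapely import wkb
from shapely.geometry import Polygon

square = Polygon([(0, 0), (1, 0), (1, 1), (0, 1)])

row = pd.Series({
    "category": "Territorial seas",
    "sub_category": None,
    "name": "Example zone",
    "geometry": wkb.dumps(square, hex=True),  # hex-encoded WKB, as stored in the CSV
})

# Equivalent of df["geometry"] = df["geometry"].apply(wkb.loads) in run().
row["geometry"] = wkb.loads(row["geometry"])
print(row["geometry"].centroid)  # POINT (0.5 0.5)
```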