From 7e914cd98efc35a6cb4d3204981322717cbdf04a Mon Sep 17 00:00:00 2001 From: Xiaohan Li Date: Thu, 6 Jul 2023 15:15:17 +0200 Subject: [PATCH] run pre-commit on all files --- .github/workflows/ci.yml | 51 ++-- envs/hydromt-delft3dfm.yml | 11 +- .../data/channels/spacing_pipe.csv | 2 +- hydromt_delft3dfm/data/dflowfm/DFlowFM.mdu | 18 +- .../data/landuse/corine_mapping.csv | 2 +- .../data/landuse/vito_mapping.csv | 2 +- hydromt_delft3dfm/data/parameters_data.yml | 1 + hydromt_delft3dfm/dflowfm.py | 220 ++++++++---------- hydromt_delft3dfm/utils.py | 41 ++-- hydromt_delft3dfm/workflows/__init__.py | 4 +- hydromt_delft3dfm/workflows/boundaries.py | 41 ++-- hydromt_delft3dfm/workflows/branches.py | 38 +-- hydromt_delft3dfm/workflows/crosssections.py | 46 ++-- hydromt_delft3dfm/workflows/dem.py | 11 +- hydromt_delft3dfm/workflows/graphs.py | 6 - hydromt_delft3dfm/workflows/helper.py | 38 ++- hydromt_delft3dfm/workflows/manholes.py | 8 +- hydromt_delft3dfm/workflows/mesh.py | 16 +- hydromt_delft3dfm/workflows/roughness.py | 10 - 19 files changed, 238 insertions(+), 328 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index e2e719b1..861d640a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,10 +1,11 @@ +--- name: ci on: push: - branches: [ main ] + branches: [main] pull_request: - branches: [ main ] + branches: [main] jobs: build: @@ -19,27 +20,27 @@ jobs: os: ["windows-latest"] steps: - - uses: actions/checkout@v3 - - uses: conda-incubator/setup-miniconda@v2 - with: - python-version: ${{ matrix.python-version }} - miniforge-variant: Mambaforge - channels: conda-forge - channel-priority: strict - environment-file: envs/hydromt-delft3dfm.yml - activate-environment: hydromt-delft3dfm + - uses: actions/checkout@v3 + - uses: conda-incubator/setup-miniconda@v2 + with: + python-version: ${{ matrix.python-version }} + miniforge-variant: Mambaforge + channels: conda-forge + channel-priority: strict + environment-file: envs/hydromt-delft3dfm.yml + activate-environment: hydromt-delft3dfm - - name: Conda info - run: | - conda info - conda list - - - name: Install hydromt_delft3dfm - run: | - flit install --pth-file - hydromt --models - - - name: Test with pytest - run: | - python -m pytest --cov=hydromt_delft3dfm --cov-report xml --cov-report term - - uses: codecov/codecov-action@v1 + - name: Conda info + run: | + conda info + conda list + + - name: Install hydromt_delft3dfm + run: | + flit install --pth-file + hydromt --models + + - name: Test with pytest + run: | + python -m pytest --cov=hydromt_delft3dfm --cov-report xml --cov-report term + - uses: codecov/codecov-action@v1 diff --git a/envs/hydromt-delft3dfm.yml b/envs/hydromt-delft3dfm.yml index a4131e18..ce370273 100644 --- a/envs/hydromt-delft3dfm.yml +++ b/envs/hydromt-delft3dfm.yml @@ -1,3 +1,4 @@ +--- # note that these are the developer dependencies name: hydromt-delft3dfm @@ -15,7 +16,7 @@ dependencies: - pandas - pip - pre-commit # linting - - pydata-sphinx-theme # docs + - pydata-sphinx-theme # docs - pygeos - pyproj - pytest # tests @@ -28,11 +29,9 @@ dependencies: - shapely>=2.0.0 - sphinx # docs - sphinx-design # docs - - xarray + - xarray - xugrid - networkx - pip: - - hydrolib-core - - meshkernel - - + - hydrolib-core + - meshkernel diff --git a/hydromt_delft3dfm/data/channels/spacing_pipe.csv b/hydromt_delft3dfm/data/channels/spacing_pipe.csv index dcb5a083..43ba0842 100644 --- a/hydromt_delft3dfm/data/channels/spacing_pipe.csv +++ b/hydromt_delft3dfm/data/channels/spacing_pipe.csv @@ 
-5,4 +5,4 @@ pipe,circle,0.8,24 pipe,circle,1.0,24 pipe,circle,1.2,24 pipe,rectangle,,24 -pipe,trapeze,,0 \ No newline at end of file +pipe,trapeze,,0 diff --git a/hydromt_delft3dfm/data/dflowfm/DFlowFM.mdu b/hydromt_delft3dfm/data/dflowfm/DFlowFM.mdu index d4abdae3..508edffb 100644 --- a/hydromt_delft3dfm/data/dflowfm/DFlowFM.mdu +++ b/hydromt_delft3dfm/data/dflowfm/DFlowFM.mdu @@ -11,7 +11,7 @@ AutoStart = 0 PathsRelativeToParent= 0 [geometry] -NetFile = +NetFile = BathymetryFile = DryPointsFile = GridEnclosureFile = @@ -20,16 +20,16 @@ LandBoundaryFile = ThinDamFile = FixedWeirFile = PillarFile = -StructureFile = +StructureFile = VertplizFile = -CrossDefFile = -CrossLocFile = -FrictFile = -StorageNodeFile = -BranchFile = +CrossDefFile = +CrossLocFile = +FrictFile = +StorageNodeFile = +BranchFile = RoofsFile = ProfdefxyzFile = -IniFieldFile = +IniFieldFile = Uniformwidth1D = 2 ManholeFile = WaterLevIni = 0 @@ -179,7 +179,7 @@ RestartDateTime = [external forcing] ExtForceFile = -ExtForceFileNew = +ExtForceFileNew = QExt = 0 [trachytopes] diff --git a/hydromt_delft3dfm/data/landuse/corine_mapping.csv b/hydromt_delft3dfm/data/landuse/corine_mapping.csv index 88c29073..9dd10444 100644 --- a/hydromt_delft3dfm/data/landuse/corine_mapping.csv +++ b/hydromt_delft3dfm/data/landuse/corine_mapping.csv @@ -44,4 +44,4 @@ corine,description,landuse,roughness_manning,infiltcap 521,Coastal lagoons,521,0.0150,600.0 522,Estuaries,522,0.150,600.0 523,Sea and ocean,523,0.010,600.0 -999,No data,999,-999.0,-999.0 \ No newline at end of file +999,No data,999,-999.0,-999.0 diff --git a/hydromt_delft3dfm/data/landuse/vito_mapping.csv b/hydromt_delft3dfm/data/landuse/vito_mapping.csv index db70d3bd..65a0cbbc 100644 --- a/hydromt_delft3dfm/data/landuse/vito_mapping.csv +++ b/hydromt_delft3dfm/data/landuse/vito_mapping.csv @@ -21,4 +21,4 @@ vito,description ,landuse,roughness_ 125 ,Open forest mixed ,125 ,0.055 ,96.0 ,3.0 126 ,Open forest unknown ,126 ,0.055 ,96.0 ,3.0 200 ,Open sea ,200 ,0.025 ,0.0 ,3.0 -0 ,No data ,-999.0 ,-999.0 ,-999.0 ,-999.0 \ No newline at end of file +0 ,No data ,-999.0 ,-999.0 ,-999.0 ,-999.0 diff --git a/hydromt_delft3dfm/data/parameters_data.yml b/hydromt_delft3dfm/data/parameters_data.yml index 05361121..7c1d9d63 100644 --- a/hydromt_delft3dfm/data/parameters_data.yml +++ b/hydromt_delft3dfm/data/parameters_data.yml @@ -1,3 +1,4 @@ +--- vito_mapping: data_type: DataFrame driver: csv diff --git a/hydromt_delft3dfm/dflowfm.py b/hydromt_delft3dfm/dflowfm.py index 1a2c86a5..c1599595 100644 --- a/hydromt_delft3dfm/dflowfm.py +++ b/hydromt_delft3dfm/dflowfm.py @@ -1,33 +1,28 @@ -"""Implement Delft3D-FM hydromt plugin model class""" +"""Implement Delft3D-FM hydromt plugin model class.""" +import itertools import logging import os from datetime import datetime, timedelta from os.path import basename, dirname, isdir, isfile, join from pathlib import Path -from turtle import st from typing import Any, Dict, List, Optional, Tuple, Union -import itertools import geopandas as gpd import hydromt +import meshkernel as mk import numpy as np import pandas as pd import xarray as xr import xugrid as xu -import meshkernel as mk -from pyproj import CRS -from hydromt.models import MeshModel -from shapely.geometry import box, LineString -from datetime import datetime, timedelta -from meshkernel import MeshKernel - -from hydrolib.core.dflowfm import FMModel, IniFieldModel, Mesh1d, Network +from hydrolib.core.dflowfm import FMModel, IniFieldModel, Mesh1d from hydrolib.core.dimr import DIMR, FMComponent, Start 
+from hydromt.models import MeshModel # from hydrolib.dhydamo.geometry import mesh from meshkernel import GeometryList - +from pyproj import CRS +from shapely.geometry import LineString, box from . import DATADIR, utils, workflows @@ -36,7 +31,7 @@ class DFlowFMModel(MeshModel): - """API for Delft3D-FM models in HydroMT""" + """API for Delft3D-FM models in HydroMT.""" _NAME = "dflowfm" _CONF = "DFlowFM.mdu" @@ -149,7 +144,6 @@ def __init__( logger The logger used to log messages. """ - if not isinstance(root, (str, Path)): raise ValueError("The 'root' parameter should be a of str or Path.") @@ -206,7 +200,6 @@ def setup_region( If the region kind in `region` is not supported for D-Flow FM. Supported regions are: "bbox", "grid" and "geom". """ - kind, region = hydromt.workflows.parse_region(region, logger=self.logger) if kind == "bbox": geom = gpd.GeoDataFrame(geometry=[box(*region["bbox"])], crs=4326) @@ -247,8 +240,8 @@ def _setup_branches( """This function is to set all common steps to add branches type of objects (ie channels, rivers, pipes...). Default frictions and crossections will also be added. - Parameters - ---------- + Parameters + ---------- br_fn : str, gpd.GeoDataFrame Either data source in data catalogue or Path for branches file or branches gpd.GeoDataFrame directly. defaults_fn : str Path @@ -279,8 +272,8 @@ def _setup_branches( Keyword in branchtype column of br_fn used to filter lines. If None all lines in br_fn are used (default). - See also - -------- + See Also + -------- dflowfm.setup_rivers dflowfm.setup_pipes """ @@ -377,7 +370,7 @@ def _setup_branches( # Line smoothing for pipes smooth_branches = br_type == "pipe" - self.logger.info(f"Processing branches") + self.logger.info("Processing branches") branches, branches_nodes = workflows.process_branches( gdf_br, id_col="branchid", @@ -387,7 +380,7 @@ def _setup_branches( logger=self.logger, ) - self.logger.info(f"Validating branches") + self.logger.info("Validating branches") workflows.validate_branches(branches) # convert to model crs @@ -417,7 +410,7 @@ def setup_channels( snap_offset: float = 0.0, allow_intersection_snapping: bool = True, ): - """This component prepares the 1D channels and adds to branches 1D network + """This component prepares the 1D channels and adds to branches 1D network. Adds model layers: @@ -459,10 +452,10 @@ def setup_channels( By default True. See Also - ---------- + -------- dflowfm._setup_branches """ - self.logger.info(f"Preparing 1D channels.") + self.logger.info("Preparing 1D channels.") # filter for allowed columns _allowed_columns = [ @@ -507,11 +500,11 @@ def setup_channels( ) # add crosssections to exisiting ones and update geoms - self.logger.debug(f"Adding crosssections vector to geoms.") + self.logger.debug("Adding crosssections vector to geoms.") self.add_crosssections(crosssections) # setup geoms - self.logger.debug(f"Adding branches and branch_nodes vector to geoms.") + self.logger.debug("Adding branches and branch_nodes vector to geoms.") self.set_geoms(channels, "channels") self.set_geoms(channel_nodes, "channel_nodes") @@ -623,11 +616,11 @@ def setup_rivers_from_dem( i.e. sloping downward in downstream direction. 
See Also - ---------- + -------- workflows.get_river_bathymetry """ - self.logger.info(f"Preparing river shape from hydrography data.") + self.logger.info("Preparing river shape from hydrography data.") # read data ds_hydro = self.data_catalog.get_rasterdataset( hydrography_fn, geom=self.region, buffer=10 @@ -741,11 +734,11 @@ def setup_rivers_from_dem( ) # add crosssections to exisiting ones and update geoms - self.logger.debug(f"Adding crosssections vector to geoms.") + self.logger.debug("Adding crosssections vector to geoms.") self.add_crosssections(crosssections) # setup geoms #TODO do we still need channels? - self.logger.debug(f"Adding rivers and river_nodes vector to geoms.") + self.logger.debug("Adding rivers and river_nodes vector to geoms.") self.set_geoms(rivers, "rivers") self.set_geoms(river_nodes, "rivers_nodes") @@ -828,11 +821,11 @@ def setup_rivers( By default True. See Also - ---------- + -------- dflowfm._setup_branches dflowfm._setup_crosssections """ - self.logger.info(f"Preparing 1D rivers.") + self.logger.info("Preparing 1D rivers.") # filter for allowed columns _allowed_columns = [ "geometry", @@ -899,7 +892,7 @@ def setup_rivers( ] = -1 # setup geoms - self.logger.debug(f"Adding rivers and river_nodes vector to geoms.") + self.logger.debug("Adding rivers and river_nodes vector to geoms.") self.set_geoms(rivers, "rivers") self.set_geoms(river_nodes, "rivers_nodes") @@ -989,11 +982,11 @@ def setup_pipes( By default True. See Also - ---------- + -------- dflowfm._setup_branches dflowfm._setup_crosssections """ - self.logger.info(f"Preparing 1D pipes.") + self.logger.info("Preparing 1D pipes.") # filter for allowed columns _allowed_columns = [ @@ -1083,11 +1076,11 @@ def setup_pipes( midpoint=False, ) # add crosssections to exisiting ones and update geoms - self.logger.debug(f"Adding crosssections vector to geoms.") + self.logger.debug("Adding crosssections vector to geoms.") self.add_crosssections(crosssections) # setup geoms - self.logger.debug(f"Adding pipes and pipe_nodes vector to geoms.") + self.logger.debug("Adding pipes and pipe_nodes vector to geoms.") self.set_geoms(pipes, "pipes") self.set_geoms(pipe_nodes, "pipe_nodes") # TODO: for manholes @@ -1158,13 +1151,12 @@ def _setup_crosssections( ------ NotImplementedError: if ``crosssection_type`` is not recongnised. """ - # setup crosssections if crosssections_fn is None and crosssections_type == "branch": # TODO: set a seperate type for rivers because other branch types might require upstream/downstream # TODO: check for required columns # read crosssection from branches - self.logger.info(f"Preparing crossections from branch.") + self.logger.info("Preparing crossections from branch.") gdf_cs = workflows.set_branch_crosssections(branches, midpoint=midpoint) elif crosssections_type == "xyz": @@ -1187,7 +1179,7 @@ def _setup_crosssections( ) if not valid_attributes: self.logger.error( - f"Required attributes [crsid, order, z] in xyz crosssections do not exist" + "Required attributes [crsid, order, z] in xyz crosssections do not exist" ) return None @@ -1223,7 +1215,7 @@ def _setup_crosssections( ) if not valid_attributes: self.logger.error( - f"Required attributes [crsid, shape, shift] in point crosssections do not exist" + "Required attributes [crsid, shape, shift] in point crosssections do not exist" ) return None @@ -1257,7 +1249,7 @@ def setup_manholes( snap_offset: float = 1e-3, ): """ - Prepares the 1D manholes to pipes or tunnels. 
Can only be used after all branches are setup + Prepares the 1D manholes to pipes or tunnels. Can only be used after all branches are setup. The manholes are generated based on a set of standards specified in ``manhole_defaults_fn`` (default) and can be overwritten with manholes read from ``manholes_fn``. @@ -1311,7 +1303,7 @@ def setup_manholes( ] # generate manhole locations and bedlevels - self.logger.info(f"generating manholes locations and bedlevels. ") + self.logger.info("generating manholes locations and bedlevels. ") manholes, branches = workflows.generate_manholes_on_branches( self.branches, bedlevel_shift=bedlevel_shift, @@ -1355,14 +1347,14 @@ def setup_manholes( gdf_manhole[list(allowed_columns)], crs=gdf_manhole.crs ) # replace generated manhole using user manholes - self.logger.debug(f"overwriting generated manholes using user manholes.") + self.logger.debug("overwriting generated manholes using user manholes.") manholes = hydromt.gis_utils.nearest_merge( manholes, gdf_manhole, max_dist=snap_offset, overwrite=True ) # generate manhole streetlevels from dem if dem_fn is not None: - self.logger.info(f"overwriting manholes street level from dem. ") + self.logger.info("overwriting manholes street level from dem. ") dem = self.data_catalog.get_rasterdataset( dem_fn, geom=self.region, variables=["elevtn"] ) @@ -1376,7 +1368,7 @@ def setup_manholes( # internal administration # drop duplicated manholeid - self.logger.debug(f"dropping duplicated manholeid") + self.logger.debug("dropping duplicated manholeid") manholes.drop_duplicates(subset="manholeid") # add nodeid to manholes manholes = hydromt.gis_utils.nearest_merge( @@ -1396,7 +1388,7 @@ def setup_manholes( ) # setup geoms - self.logger.debug(f"Adding manholes vector to geoms.") + self.logger.debug("Adding manholes vector to geoms.") self.set_geoms(manholes, "manholes") def setup_1dboundary( @@ -1500,7 +1492,7 @@ def setup_1dboundary( pass else: self.logger.error( - f"forcing has different start and end time. Please check the forcing file. support yyyy-mm-dd HH:MM:SS. " + "forcing has different start and end time. Please check the forcing file. support yyyy-mm-dd HH:MM:SS. " ) # reproject if needed and convert to location if da_bnd.vector.crs != self.crs: @@ -1574,11 +1566,10 @@ def _setup_1dstructures( See Also - ---------- + -------- dflowfm.setup_bridges dflowfm.setup_culverts """ - if snap_offset is None: snap_offset = self._network_snap_offset self.logger.info( @@ -1748,10 +1739,9 @@ def setup_bridges( By default None. In this case, global variable "network_snap_offset" will be used.. See Also - ---------- + -------- dflowfm._setup_1dstructures """ - # keywords in hydrolib-core _st_type = "bridge" _allowed_columns = [ @@ -1808,7 +1798,7 @@ def setup_culverts( culvert_filter: Optional[str] = None, snap_offset: Optional[float] = None, ): - """Prepares culverts, including locations and crossections. Note that only subtype culvert is supported, i.e. inverted siphon is not supported + """Prepares culverts, including locations and crossections. Note that only subtype culvert is supported, i.e. inverted siphon is not supported. The culverts are read from ``culverts_fn`` and if any missing, filled with information provided in ``culverts_defaults_fn``. @@ -1856,10 +1846,9 @@ def setup_culverts( By default None. In this case, global variable "network_snap_offset" will be used.. 
See Also - ---------- + -------- dflowfm._setup_1dstructures """ - # keywords in hydrolib-core _st_type = "culvert" _allowed_columns = [ @@ -1961,10 +1950,9 @@ def setup_mesh2d( If the grid of the spatial domain contains 0 x-coordinates or 0 y-coordinates. See Also - ---------- + -------- """ - # Function moved to MeshModel in hydromt core # Recreate region dict for core function if mesh2d_fn is not None: @@ -1987,7 +1975,7 @@ def setup_mesh2d( # Check if intersects with region xmin, ymin, xmax, ymax = self.bounds subset = mesh2d.ugrid.sel(y=slice(ymin, ymax), x=slice(xmin, xmax)) - err = f"RasterDataset: No data within model region." + err = "RasterDataset: No data within model region." subset = subset.ugrid.assign_node_coords() if subset.ugrid.grid.node_x.size == 0 or subset.ugrid.grid.node_y.size == 0: raise IndexError(err) @@ -2038,7 +2026,6 @@ def setup_mesh2d_refine( By default 1, i.e. no refinement is applied. """ - if self.mesh2d is None: logger.error("2d mesh is not available.") return @@ -2101,7 +2088,7 @@ def setup_mesh2d_refine( # reproject if da.raster.crs != self.crs: self.logger.warning( - f"Sample grid has a different resolution than model. Reprojecting with nearest but some information might be lost." + "Sample grid has a different resolution than model. Reprojecting with nearest but some information might be lost." ) da = da.raster.reproject(self.crs, method="nearest") # get sample point @@ -2190,13 +2177,12 @@ def setup_link1d2d( Only used when ''link_direction'' = '2d_to_1d' and ''link_type'' = 'lateral'. Defaults to 2.0. Links with an intersection distance larger than 2 times the center to edge distance of the cell, are removed. - See Also - ---------- + See Also + -------- workflows.links1d2d_add_links_1d_to_2d workflows.links1d2d_add_links_2d_to_1d_embedded workflows.links1d2d_add_links_2d_to_1d_lateral """ - # check existing network if self.mesh1d.is_empty() or self.mesh2d.is_empty(): self.logger.error( @@ -2223,7 +2209,7 @@ def setup_link1d2d( else: branchids = None # use all branches self.logger.warning( - f"adding 1d2d links for all branches at non boundary locations." + "adding 1d2d links for all branches at non boundary locations." ) # setup 1d2d links @@ -2510,7 +2496,7 @@ def setup_2dboundary( if "boundary_id" in "boundaries_fn" does not match the columns of ``boundaries_timeseries_fn``. """ - self.logger.info(f"Preparing 2D boundaries.") + self.logger.info("Preparing 2D boundaries.") if boundary_type == "waterlevel": boundary_unit = "m" @@ -2618,7 +2604,7 @@ def setup_rainfall_from_constant( constant_value: float Constant value for the rainfall_rate timeseries in mm/day. """ - self.logger.info(f"Preparing rainfall meteo forcing from uniform timeseries.") + self.logger.info("Preparing rainfall meteo forcing from uniform timeseries.") refdate, tstart, tstop = self.get_model_time() # time slice meteo_location = ( @@ -2686,7 +2672,7 @@ def setup_rainfall_from_uniform_timeseries( By default True for "rainfall_rate". Note that Delft3DFM 1D2D Suite 2022.04 supports only "rainfall_rate". """ - self.logger.info(f"Preparing rainfall meteo forcing from uniform timeseries.") + self.logger.info("Preparing rainfall meteo forcing from uniform timeseries.") refdate, tstart, tstop = self.get_model_time() # time slice meteo_location = ( @@ -2744,7 +2730,8 @@ def _assert_read_mode(self): def read(self): """Method to read the complete model schematization and configuration from file. - # FIXME: where to read crs?""" + # FIXME: where to read crs?. 
+ """ self.logger.info(f"Reading model data from {self.root}") self.read_dimr() self.read_config() @@ -2776,7 +2763,7 @@ def write(self): # complete model self.write_data_catalog() def read_config(self) -> None: - """Use Hydrolib-core reader and return to dictionnary""" + """Use Hydrolib-core reader and return to dictionnary.""" # Read via init_dfmmodel if self._dfmmodel is None: self.init_dfmmodel() @@ -2800,7 +2787,7 @@ def read_config(self) -> None: self._config = cf_dict def write_config(self) -> None: - """From config dict to Hydrolib MDU""" + """From config dict to Hydrolib MDU.""" # Not sure if this is worth it compared to just calling write_config super method # advantage is the validator but the whole model is then read when initialising FMModel self._assert_write_mode @@ -2820,7 +2807,7 @@ def write_config(self) -> None: os.chdir(cwd) def read_maps(self) -> Dict[str, Union[xr.Dataset, xr.DataArray]]: - """Read maps from initialfield and parse to dict of xr.DataArray""" + """Read maps from initialfield and parse to dict of xr.DataArray.""" self._assert_read_mode # Read initial fields inifield_model = self.dfmmodel.geometry.inifieldfile @@ -2873,7 +2860,7 @@ def read_maps(self) -> Dict[str, Union[xr.Dataset, xr.DataArray]]: return self._maps def write_maps(self) -> None: - """Write maps as tif files in maps folder and update initial fields""" + """Write maps as tif files in maps folder and update initial fields.""" if len(self._maps) == 0: self.logger.debug("No maps data found, skip writing.") return @@ -3004,7 +2991,7 @@ def read_geoms(self) -> None: # FIXME: gives an error when only 2D model. self.set_geoms(structures[structures["type"] == st_type], f"{st_type}s") def write_geoms(self) -> None: - """Write model geometries to a GeoJSON file at /""" + """Write model geometries to a GeoJSON file at /.""" self._assert_write_mode # Write geojson equivalent of all objects. 
Note that these files are not directly used when updating the model @@ -3030,7 +3017,7 @@ def write_geoms(self) -> None: # Write structures # Manholes if "manholes" in self._geoms: - self.logger.info(f"Writting manholes file.") + self.logger.info("Writting manholes file.") storage_fn = utils.write_manholes( self.geoms["manholes"], savedir, @@ -3047,7 +3034,7 @@ def write_geoms(self) -> None: structures = list(itertools.chain.from_iterable(structures)) structures = pd.DataFrame(structures).replace(np.nan, None) # write - self.logger.info(f"Writting structures file.") + self.logger.info("Writting structures file.") structures_fn = utils.write_structures( structures, savedir, @@ -3057,7 +3044,7 @@ def write_geoms(self) -> None: def read_forcing( self, ) -> None: # FIXME reading of forcing should include boundary, lateral and meteo - """Read forcing at and parse to dict of xr.DataArray""" + """Read forcing at and parse to dict of xr.DataArray.""" self._assert_read_mode # Read external forcing ext_model = self.dfmmodel.external_forcing.extforcefilenew @@ -3107,7 +3094,7 @@ def read_forcing( # TODO lateral def write_forcing(self) -> None: - """write forcing into hydrolib-core ext and forcing models""" + """Write forcing into hydrolib-core ext and forcing models.""" if len(self._forcing) == 0: self.logger.debug("No forcing data found, skip writing.") else: @@ -3124,7 +3111,7 @@ def write_forcing(self) -> None: self.set_config("external_forcing.extforcefilenew", ext_fn) def read_mesh(self): - """Read network file with Hydrolib-core and extract 2D mesh/branches info""" + """Read network file with Hydrolib-core and extract 2D mesh/branches info.""" self._assert_read_mode # Read mesh @@ -3194,7 +3181,7 @@ def read_mesh(self): self._branches = branches def write_mesh(self, write_gui=True): - """Write 1D branches and 2D mesh at in model ready format""" + """Write 1D branches and 2D mesh at in model ready format.""" self._assert_write_mode savedir = dirname(join(self.root, self._config_fn)) @@ -3227,28 +3214,27 @@ def write_mesh(self, write_gui=True): # other mesh1d related geometry if not self.mesh1d.is_empty() and write_gui: self.logger.info("Writting branches.gui file") - manholes = None if "manholes" in self.geoms: - manholes = self.geoms["manholes"] + self.geoms["manholes"] _ = utils.write_branches_gui(self.branches, savedir) def read_states(self): - """Read states at and parse to dict of xr.DataArray""" + """Read states at and parse to dict of xr.DataArray.""" return self._states # raise NotImplementedError() def write_states(self): - """write states at in model ready format""" + """Write states at in model ready format.""" pass # raise NotImplementedError() def read_results(self): - """Read results at and parse to dict of xr.DataArray""" + """Read results at and parse to dict of xr.DataArray.""" return self._results # raise NotImplementedError() def write_results(self): - """write results at in model ready format""" + """Write results at in model ready format.""" pass # raise NotImplementedError() @@ -3306,13 +3292,13 @@ def init_dfmmodel(self): @property def dimr(self): - """DIMR file object""" + """DIMR file object.""" if not self._dimr: self.read_dimr() return self._dimr def read_dimr(self, dimr_fn: Optional[str] = None) -> None: - """Read DIMR from file and else create from hydrolib-core""" + """Read DIMR from file and else create from hydrolib-core.""" if dimr_fn is None: dimr_fn = join(self.root, self._dimr_fn) # if file exist, read @@ -3327,7 +3313,7 @@ def read_dimr(self, dimr_fn: 
Optional[str] = None) -> None: self._dimr = dimr def write_dimr(self, dimr_fn: Optional[str] = None): - """Writes the dmir file. In write mode, updates first the FMModel component""" + """Writes the dmir file. In write mode, updates first the FMModel component.""" # force read self.dimr if dimr_fn is not None: @@ -3391,7 +3377,7 @@ def set_branches(self, branches: gpd.GeoDataFrame): _ = self.set_branches_component(name="pipe") # update geom - self.logger.debug(f"Adding branches vector to geoms.") + self.logger.debug("Adding branches vector to geoms.") self.set_geoms(branches, "branches") # set hydrolib-core net object and update self._mesh @@ -3400,7 +3386,7 @@ def set_branches(self, branches: gpd.GeoDataFrame): # update boundaries self.set_geoms(self.get_boundaries(), "boundaries") - self.logger.debug(f"Updating branches in network.") + self.logger.debug("Updating branches in network.") def add_branches( self, @@ -3408,8 +3394,7 @@ def add_branches( branchtype: str, node_distance: float = 40.0, ): - """Add new branches of branchtype to the branches and mesh1d object""" - + """Add new branches of branchtype to the branches and mesh1d object.""" snap_newbranches_to_branches_at_snapnodes = ( self._snap_newbranches_to_branches_at_snapnodes ) @@ -3525,14 +3510,12 @@ def closedsystem(self): @property def mesh1d(self): - """ - Returns the mesh1d (hydrolib-core Mesh1d object) representing the 1D mesh. - """ + """Returns the mesh1d (hydrolib-core Mesh1d object) representing the 1D mesh.""" return self.dfmmodel.geometry.netfile.network._mesh1d @property def mesh1d_nodes(self): - """Returns the nodes of mesh 1D as geodataframe""" + """Returns the nodes of mesh 1D as geodataframe.""" mesh1d_nodes = gpd.points_from_xy( x=self.mesh1d.mesh1d_node_x, y=self.mesh1d.mesh1d_node_y, @@ -3552,7 +3535,7 @@ def mesh1d_nodes(self): return mesh1d_nodes def set_mesh1d(self): - """update the mesh1d in hydrolib-core net object by overwrite and add to self._mesh""" + """Update the mesh1d in hydrolib-core net object by overwrite and add to self._mesh.""" # reinitialise mesh1d (TODO: a clear() function in hydrolib-core could be handy) self.dfmmodel.geometry.netfile.network._mesh1d = Mesh1d( meshkernel=self.dfmmodel.geometry.netfile.network.meshkernel @@ -3587,8 +3570,7 @@ def set_mesh1d(self): # self._add_mesh1d(self.mesh1d) def _add_mesh1d(self, mesh1d: Mesh1d): - """update the mesh1d in self.mesh object""" - + """Update the mesh1d in self.mesh object.""" # Create Ugrid1d object for mesh # TODO: after release of xugrid use grid = xu.Ugrid1d.from_meshkernel(mesh1d) grid = xu.Ugrid1d( @@ -3706,7 +3688,7 @@ def _add_mesh1d(self, mesh1d: Mesh1d): @property def crosssections(self): - """Quick accessor to crosssections geoms""" + """Quick accessor to crosssections geoms.""" if "crosssections" in self.geoms: gdf = self.geoms["crosssections"] else: @@ -3714,7 +3696,7 @@ def crosssections(self): return gdf def add_crosssections(self, crosssections: gpd.GeoDataFrame): - """Updates crosssections in geoms with new ones""" + """Updates crosssections in geoms with new ones.""" # TODO: sort out the crosssections, e.g. 
remove branch crosssections if point/xyz exist etc # TODO: setup river crosssections, set contrains based on branch types if len(self.crosssections) > 0: @@ -3732,7 +3714,7 @@ def add_crosssections(self, crosssections: gpd.GeoDataFrame): ) if _crosssections_locations["temp_id"].duplicated().any(): logger.warning( - f"Duplicate crosssections locations found, removing duplicates" + "Duplicate crosssections locations found, removing duplicates" ) # Remove duplicates based on the branch_id, branch_offset column, keeping the first occurrence (with minimum branch_distance) _crosssections_locations = _crosssections_locations.drop_duplicates( @@ -3753,7 +3735,7 @@ def add_crosssections(self, crosssections: gpd.GeoDataFrame): ) if mask_to_remove.sum() > 0: self.logger.warning( - f"Overwrite branch crossections where user-defined crossections are used." + "Overwrite branch crossections where user-defined crossections are used." ) crosssections = gpd.GeoDataFrame( pd.concat( @@ -3765,7 +3747,7 @@ def add_crosssections(self, crosssections: gpd.GeoDataFrame): @property def boundaries(self) -> gpd.GeoDataFrame: - """Quick accessor to boundaries geoms""" + """Quick accessor to boundaries geoms.""" if "boundaries" in self.geoms: gdf = self.geoms["boundaries"] else: @@ -3776,9 +3758,8 @@ def get_boundaries(self) -> gpd.GeoDataFrame: """Get all boundary locations from the network branch ends are possible locations for boundaries for open system, both upstream and downstream ends are allowed to have boundaries - for closed system, only downstream ends are allowed to have boundaries + for closed system, only downstream ends are allowed to have boundaries. """ - # generate all possible and allowed boundary locations _boundaries = workflows.generate_boundaries_from_branches( self.branches, where="both" @@ -3791,16 +3772,19 @@ def get_boundaries(self) -> gpd.GeoDataFrame: return boundaries def set_boundaries(self, boundaries: gpd.GeoDataFrame): - """Updates boundaries in geoms with new ones""" + """Updates boundaries in geoms with new ones.""" if len(self.boundaries) > 0: - task_last = lambda s1, s2: s2 + + def task_last(s1, s2): + return s2 + boundaries = self.boundaries.combine( boundaries, func=task_last, overwrite=True ) self.set_geoms(boundaries, name="boundaries") def get_model_time(self): - """Return (refdate, tstart, tstop) tuple with parsed model reference datem start and end time""" + """Return (refdate, tstart, tstop) tuple with parsed model reference datem start and end time.""" refdate = datetime.strptime(str(self.get_config("time.refdate")), "%Y%m%d") tstart = refdate + timedelta(seconds=float(self.get_config("time.tstart"))) tstop = refdate + timedelta(seconds=float(self.get_config("time.tstop"))) @@ -3815,7 +3799,7 @@ def get_model_time(self): @property def network1d_nodes(self): - """get network1d nodes as gdp""" + """Get network1d nodes as gdp.""" # get networkids to complete the boundaries _network1d_nodes = gpd.points_from_xy( x=self.dfmmodel.geometry.netfile.network._mesh1d.network1d_node_x, @@ -3832,21 +3816,19 @@ def network1d_nodes(self): @property def res(self): - "resolution of the mesh2d" + "Resolution of the mesh2d." if self._res is not None: return self._res @property def mesh2d(self): - """ - Returns the mesh2d (hydrolib-core Mesh2d object) representing the 2D mesh. 
- """ + """Returns the mesh2d (hydrolib-core Mesh2d object) representing the 2D mesh.""" return ( self.dfmmodel.geometry.netfile.network._mesh2d ) # needed to setup 1d2d links def set_mesh2d(self, mesh2d: mk.py_structures.Mesh2d = None): - """update the mesh2d in hydrolib-core net object by overwrite""" + """Update the mesh2d in hydrolib-core net object by overwrite.""" # process mesh2d to net object if mesh2d is None and self._mesh is not None: mesh2d = self._mesh.ugrid.grid.mesh @@ -3854,16 +3836,12 @@ def set_mesh2d(self, mesh2d: mk.py_structures.Mesh2d = None): @property def link1d2d(self): - """ - Returns the link1d2d (hydrolib-core Link1d2d object) representing the 1d2d links. - """ + """Returns the link1d2d (hydrolib-core Link1d2d object) representing the 1d2d links.""" return self.dfmmodel.geometry.netfile.network._link1d2d @property def network(self): - """ - Returns the network (hydrolib-core Network object) representing the entire network file. - """ + """Returns the network (hydrolib-core Network object) representing the entire network file.""" return self.dfmmodel.geometry.netfile.network @property diff --git a/hydromt_delft3dfm/utils.py b/hydromt_delft3dfm/utils.py index 968f449c..c0eaf9e4 100644 --- a/hydromt_delft3dfm/utils.py +++ b/hydromt_delft3dfm/utils.py @@ -1,29 +1,27 @@ -import os from enum import Enum from os.path import join from pathlib import Path -from typing import Dict, List, Tuple, Union +from typing import Dict, List, Tuple import geopandas as gpd import numpy as np import pandas as pd import xarray as xr -from shapely.geometry import LineString, Point - from hydrolib.core.dflowfm import ( - FMModel, + Boundary, BranchModel, CrossDefModel, CrossLocModel, ExtModel, - Boundary, - Meteo, + FMModel, ForcingModel, FrictionModel, + Meteo, PolyFile, StorageNodeModel, StructureModel, ) +from shapely.geometry import Point from .workflows import helper @@ -52,7 +50,7 @@ def read_branches_gui( fm_model: FMModel, ) -> gpd.GeoDataFrame: """ - Read branches.gui and add the properties to branches geodataframe + Read branches.gui and add the properties to branches geodataframe. Parameters ---------- @@ -111,7 +109,7 @@ def write_branches_gui( savedir: str, ) -> str: """ - write branches.gui file from branches geodataframe + write branches.gui file from branches geodataframe. Parameters ---------- @@ -130,7 +128,6 @@ def write_branches_gui( #TODO: branches.gui is written with a [general] section which is not recongnised by GUI. Improvement of the GUI is needed. #TODO: branches.gui has a column is custumised length written as bool, which is not recongnised by GUI. improvement of the hydrolib-core writer is needed. """ - if not gdf["branchtype"].isin(["pipe", "tunnel"]).any(): gdf[["manhole_up", "manhole_dn"]] = "" @@ -252,7 +249,7 @@ def _list2Str(lst): def write_crosssections(gdf: gpd.GeoDataFrame, savedir: str) -> Tuple[str, str]: - """write crosssections into hydrolib-core crsloc and crsdef objects + """Write crosssections into hydrolib-core crsloc and crsdef objects. Parameters ---------- @@ -315,7 +312,7 @@ def write_crosssections(gdf: gpd.GeoDataFrame, savedir: str) -> Tuple[str, str]: def read_friction(gdf: gpd.GeoDataFrame, fm_model: FMModel) -> gpd.GeoDataFrame: """ read friction files and add properties to branches geodataframe. - assumes cross-sections have been read before to contain per branch frictionid + assumes cross-sections have been read before to contain per branch frictionid. 
Parameters ---------- @@ -362,7 +359,7 @@ def read_friction(gdf: gpd.GeoDataFrame, fm_model: FMModel) -> gpd.GeoDataFrame: def write_friction(gdf: gpd.GeoDataFrame, savedir: str) -> List[str]: """ - write friction files from crosssections geodataframe + write friction files from crosssections geodataframe. Parameters ---------- @@ -452,7 +449,7 @@ def read_structures(branches: gpd.GeoDataFrame, fm_model: FMModel) -> gpd.GeoDat def write_structures(gdf: gpd.GeoDataFrame, savedir: str) -> str: """ - write structures into hydrolib-core structures objects + write structures into hydrolib-core structures objects. Parameters ---------- @@ -528,7 +525,7 @@ def read_manholes(gdf: gpd.GeoDataFrame, fm_model: FMModel) -> gpd.GeoDataFrame: def write_manholes(gdf: gpd.GeoDataFrame, savedir: str) -> str: """ - write manholes into hydrolib-core storage nodes objects + write manholes into hydrolib-core storage nodes objects. Parameters ---------- @@ -558,7 +555,7 @@ def read_1dboundary( ) -> xr.DataArray: """ Read for a specific quantity the corresponding external and forcing files and parse to xarray - # TODO: support external forcing for 2D + # TODO: support external forcing for 2D. Parameters ---------- @@ -650,7 +647,7 @@ def read_1dboundary( def write_1dboundary(forcing: Dict, savedir: str = None, ext_fn: str = None) -> Tuple: """ " - write 1dboundary ext and boundary files from forcing dict + write 1dboundary ext and boundary files from forcing dict. Parameters ---------- @@ -735,7 +732,7 @@ def write_1dboundary(forcing: Dict, savedir: str = None, ext_fn: str = None) -> def read_2dboundary(df: pd.DataFrame, workdir: Path = Path.cwd()) -> xr.DataArray: """ - Read a 2d boundary forcing location and values, and parse to xarray + Read a 2d boundary forcing location and values, and parse to xarray. Parameters ---------- @@ -750,7 +747,6 @@ def read_2dboundary(df: pd.DataFrame, workdir: Path = Path.cwd()) -> xr.DataArra da_out: xr.DataArray External and forcing values combined into a DataArray with name starts with "boundary2d". """ - # Initialise dataarray attributes bc = {"quantity": df.quantity} # location file, assume one location file has only one location (hydromt writer) and read @@ -787,7 +783,7 @@ def read_2dboundary(df: pd.DataFrame, workdir: Path = Path.cwd()) -> xr.DataArra # Else not implemented yet else: raise NotImplementedError( - f"ForcingFile with several function for a single variable not implemented yet. Skipping reading forcing." + "ForcingFile with several function for a single variable not implemented yet. Skipping reading forcing." ) # Get coordinates @@ -823,7 +819,6 @@ def write_2dboundary(forcing: Dict, savedir: str, ext_fn: str = None) -> list[di Path of the external forcing file (.ext) in which this function will append to. """ - # filter for 2d boundary forcing = { key: forcing[key] for key in forcing.keys() if key.startswith("boundary2d") @@ -901,7 +896,7 @@ def write_2dboundary(forcing: Dict, savedir: str, ext_fn: str = None) -> list[di def read_meteo(df: pd.DataFrame, quantity: str) -> xr.DataArray: """ - Read for a specific quantity the corresponding external and forcing files and parse to xarray + Read for a specific quantity the corresponding external and forcing files and parse to xarray. Parameters ---------- @@ -998,7 +993,6 @@ def write_meteo(forcing: Dict, savedir: str, ext_fn: str = None) -> list[dict]: Path of the external forcing file (.ext) in which this function will append to. 
""" - # filter for 2d meteo forcing = {key: forcing[key] for key in forcing.keys() if key.startswith("meteo")} if len(forcing) == 0: @@ -1079,7 +1073,6 @@ def write_ext( By default, append. """ - # FIXME: requires change of working directory for the validator to work properly import os diff --git a/hydromt_delft3dfm/workflows/__init__.py b/hydromt_delft3dfm/workflows/__init__.py index 52869a72..05ba18fb 100644 --- a/hydromt_delft3dfm/workflows/__init__.py +++ b/hydromt_delft3dfm/workflows/__init__.py @@ -1,11 +1,11 @@ -"""HydroMT dflowfm workflows""" +"""HydroMT dflowfm workflows.""" from .boundaries import * from .branches import * from .crosssections import * -from .mesh import * from .dem import * from .graphs import * from .helper import * from .manholes import * +from .mesh import * from .roughness import * diff --git a/hydromt_delft3dfm/workflows/boundaries.py b/hydromt_delft3dfm/workflows/boundaries.py index 11516f27..ef4a90f7 100644 --- a/hydromt_delft3dfm/workflows/boundaries.py +++ b/hydromt_delft3dfm/workflows/boundaries.py @@ -1,6 +1,5 @@ # -*- coding: utf-8 -*- -import configparser import logging from pathlib import Path @@ -8,11 +7,8 @@ import hydromt.io import numpy as np import pandas as pd -import shapely import xarray as xr -from hydromt import config -from scipy.spatial import distance -from shapely.geometry import LineString, Point +from shapely.geometry import Point from .graphs import gpd_to_digraph @@ -46,7 +42,6 @@ def generate_boundaries_from_branches( gpd.GeoDataFrame A data frame containing all the upstream and downstream end nodes of the branches """ - # convert branches to graph G = gpd_to_digraph(branches) @@ -105,7 +100,6 @@ def select_boundary_type( Parameters ---------- - boundaries : gpd.GeoDataFrame The boundaries. branch_type : {'river', 'pipe'} @@ -123,7 +117,6 @@ def select_boundary_type( pd.DataFrame A data frame containing the boundary location per branch type and boundary type. """ - boundaries_branch_type = boundaries.loc[boundaries["branchtype"] == branch_type, :] if branch_type == "river": if boundary_type == "waterlevel": @@ -172,7 +165,6 @@ def validate_boundaries(boundaries: gpd.GeoDataFrame, branch_type: str = "river" The branch type. """ - if branch_type == "river": # TODO add other open system branch_type for _, bnd in boundaries.iterrows(): # TODO extended @@ -200,7 +192,7 @@ def compute_boundary_values( logger=logger, ): """ - Compute 1d boundary values + Compute 1d boundary values. Parameters ---------- @@ -230,7 +222,6 @@ def compute_boundary_values( logger Logger to log messages. """ - nodata_ids = [] # Timeseries boundary values if da_bnd is not None: logger.info(f"Preparing 1D {boundary_type} boundaries from timeseries.") @@ -363,7 +354,6 @@ def compute_2dboundary_values( ValueError: if no boundary to compute. """ - # Timeseries boundary values if boundaries is None or len(boundaries) == 0: raise ValueError("No boundary to compute.") @@ -442,12 +432,11 @@ def compute_2dboundary_values( def gpd_to_pli(gdf: gpd.GeoDataFrame, output_dir: Path): - """function to convert geopandas GeoDataFrame (gdf) into pli files at 'output_dir' directory. + """Function to convert geopandas GeoDataFrame (gdf) into pli files at 'output_dir' directory. the geodataframe must has index as stations and geometry of the stations. each row of the geodataframe will be converted into a single pli file. the file name and the station name will be the index of that row. 
""" - for _, g in gdf.iterrows(): pli_name = g.index pli_coords = g.geometry.coords[:] @@ -466,7 +455,7 @@ def df_to_bc( unit="m3/s", freq="H", ): - """function to convert pandas timeseires 'df' into bc file at 'output_dir'/'output_filename'.bc + """Function to convert pandas timeseires 'df' into bc file at 'output_dir'/'output_filename'.bc the time series must has time as index, columns names as stations. the time series will be first converted into a equidistance timeseries with frequency specified in 'freq'. support [D, H,M,S] each columns-wise array will be converted into one bc timeseries. @@ -479,21 +468,21 @@ def df_to_bc( stations = df.columns with open(output_dir.joinpath(f"{output_filename}.bc"), "w") as f: - f.write(f"[General]\n") - f.write(f"\tfileVersion = 1.01\n") - f.write(f"\tfileType = boundConds\n") + f.write("[General]\n") + f.write("\tfileVersion = 1.01\n") + f.write("\tfileType = boundConds\n") for s in stations: d = df[s] - f.write(f"\n") - f.write(f"[forcing]\n") + f.write("\n") + f.write("[forcing]\n") f.write(f"\tName = {d.name}\n") - f.write(f"\tfunction = timeSeries\n") - f.write(f"\ttimeInterpolation = linear\n") + f.write("\tfunction = timeSeries\n") + f.write("\ttimeInterpolation = linear\n") f.write(f"\tquantity = {quantity}\n") f.write(f"\tunit = {unit}\n") - f.write(f"\tquantity = time\n") + f.write("\tquantity = time\n") f.write(f"\tunit = {time_unit[freq]} since {time[0].date()}\n") - f.write(f"\t0 0\n") + f.write("\t0 0\n") for i, di in enumerate(d.values): f.write(f"\t{i} {di}\n") @@ -506,7 +495,7 @@ def compute_meteo_forcings( logger=logger, ) -> xr.DataArray: """ - Compute meteo forcings + Compute meteo forcings. Parameters ---------- @@ -543,7 +532,7 @@ def compute_meteo_forcings( # Timeseries boundary values - logger.info(f"Preparing global (spatially uniform) timeseries.") + logger.info("Preparing global (spatially uniform) timeseries.") # get data freq in seconds _TIMESTR = {"D": "days", "H": "hours", "T": "minutes", "S": "seconds"} dt = df_meteo.time[1] - df_meteo.time[0] diff --git a/hydromt_delft3dfm/workflows/branches.py b/hydromt_delft3dfm/workflows/branches.py index 95d50e46..41d56af8 100644 --- a/hydromt_delft3dfm/workflows/branches.py +++ b/hydromt_delft3dfm/workflows/branches.py @@ -1,18 +1,13 @@ # -*- coding: utf-8 -*- -import configparser import logging -from typing import Union import geopandas as gpd -import hydromt.io import numpy as np import pandas as pd import shapely -from hydromt import config from scipy.spatial import distance from shapely.geometry import LineString, MultiLineString, Point -from shapely.ops import snap, split from .helper import cut_pieces, split_lines @@ -188,8 +183,7 @@ def process_branches( branches_nodes : gpd.GeoDataFrame Preprocessed branches' nodes. 
""" - - logger.debug(f"Cleaning up branches") + logger.debug("Cleaning up branches") # TODO: maybe add arguments,use branch cross sections # global_controls = branches_ini.get("global", None) @@ -201,11 +195,11 @@ def process_branches( logger=logger, ) - logger.debug(f"Splitting branches based on spacing") + logger.debug("Splitting branches based on spacing") # TODO: add check, if spacing is used, then in branch cross section cannot be setup later branches = space_branches(branches, smooth_branches=smooth_branches, logger=logger) - logger.debug(f"Generating branchnodes") + logger.debug("Generating branchnodes") branch_nodes = generate_branchnodes(branches, id_col, logger=logger) return branches, branch_nodes @@ -226,7 +220,7 @@ def cleanup_branches( * Removing branches that are shorter than 0.1 meters * Renaming branches with duplicate IDs * Reducing the precision of the branch geometry to 6 digits. - * Snapping the branches + * Snapping the branches. Parameters ---------- @@ -246,7 +240,6 @@ def cleanup_branches( gpd.GeoDataFrame The cleanup branches. """ - # remove null geometry branches = branches.loc[~branches.geometry.isna(), :] @@ -258,7 +251,7 @@ def cleanup_branches( [p[:2] for p in x.coords] # simply line geometry by removing Z coodinates ) ) - logger.debug(f"Exploding branches.") + logger.debug("Exploding branches.") # remove duplicated geometry G = _branches["geometry"].apply(lambda geom: geom.wkb) @@ -321,21 +314,21 @@ def cleanup_branches( branches = reduce_gdf_precision( branches, rounding_precision=6 # branches_ini["global"]["rounding_precision"] ) # recommned to be larger than e-8 - logger.debug(f"Reducing precision of the GeoDataFrame. Rounding precision (e-6) .") + logger.debug("Reducing precision of the GeoDataFrame. Rounding precision (e-6) .") # snap branches if allow_intersection_snapping is True: # snap points no matter it is at intersection or ends branches = snap_branch_ends(branches, offset=snap_offset) logger.debug( - f"Performing snapping at all branch ends, including intersections (To avoid messy results, please use a lower snap_offset)." + "Performing snapping at all branch ends, including intersections (To avoid messy results, please use a lower snap_offset)." ) else: # snap points at ends only branches = snap_branch_ends(branches, offset=snap_offset, max_points=2) logger.debug( - f"Performing snapping at all branch ends, excluding intersections (To avoid messy results, please use a lower snap_offset).." + "Performing snapping at all branch ends, excluding intersections (To avoid messy results, please use a lower snap_offset).." ) # Drop count column @@ -368,7 +361,6 @@ def space_branches( gpd.GeoDataFrame The split branches. """ - # split branches based on spacing branches_ = split_branches( branches, spacing_col=spacing_col, smooth_branches=smooth_branches @@ -402,7 +394,6 @@ def generate_branchnodes( gpd.GeoDataFrame The branch nodes. """ - # generate node up and downstream nodes = pd.DataFrame( [Point(l.coords[0]) for li, l in branches["geometry"].items()] @@ -429,7 +420,7 @@ def generate_branchnodes( # remove duplicated geometry _nodes = nodes.copy() G = _nodes["geometry"].apply(lambda geom: geom.wkb) - n = len(G) - len(G.drop_duplicates().index) + len(G) - len(G.drop_duplicates().index) nodes = _nodes[_nodes.index.isin(G.drop_duplicates().index)] nodes = gpd.GeoDataFrame(nodes) nodes.crs = branches.crs @@ -459,7 +450,7 @@ def validate_branches( else: logger.error( f"Branches {branches.index[branches.geometry.length <= 0]} have length of 0 meter. 
" - + f"Issue might have been caused by using a snap_offset that is too large. Please revise or modify the branches data layer. " + + "Issue might have been caused by using a snap_offset that is too large. Please revise or modify the branches data layer. " ) @@ -495,7 +486,6 @@ def split_branches( split_branches : gpd.GeoDataFrame Branches after split, new ids will be overwritten for the branch index. Old ids are stored in "OLD_" + index. """ - id_col = branches.index.name if spacing_col is None: logger.info(f"Splitting branches with spacing of {spacing_const} [m]") @@ -558,7 +548,6 @@ def _split_branches_by_spacing_const( split_branches : gpd.GeoDataFrame Branches after split, new ids will be stored in id_col. Original ids are stored in "ORIG_" + id_col. """ - if spacing_const == float("inf"): branches[f"ORIG_{id_col}"] = branches[id_col] branches.index = branches[id_col] @@ -728,7 +717,6 @@ def snap_branch_ends( branches : gpd.GeoDataFrame Branches updated with snapped geometry. """ - # Collect endpoints _endpoints = [] for branch in branches.itertuples(): @@ -794,7 +782,6 @@ def possibly_intersecting( dataframebounds : numpy.array geometry : shapely.geometry.Polygon """ - geobounds = geometry.bounds idx = ( (dataframebounds[0] - buffer < geobounds[2]) @@ -916,9 +903,9 @@ def snap_newbranches_to_branches_at_snapnodes( branches: gpd.GeoDataFrame, snapnodes: gpd.GeoDataFrame, ): - """function to snap new_branches to branches at snapnodes. + """Function to snap new_branches to branches at snapnodes. snapnodes are located at branches. new branches will be snapped, and branches will be splitted. - # NOTE: no interpolation of crosssection is needed because inter branch interpolation is turned on using branchorder + # NOTE: no interpolation of crosssection is needed because inter branch interpolation is turned on using branchorder. Parameters ---------- @@ -936,7 +923,6 @@ def snap_newbranches_to_branches_at_snapnodes( branches_snapped : geopandas.GeoDataFrame Geodataframe of branches splitted at snapnodes to allow connection with the new_branches_snapped. 
""" - new_branches.index = new_branches.branchid branches.index = branches.branchid diff --git a/hydromt_delft3dfm/workflows/crosssections.py b/hydromt_delft3dfm/workflows/crosssections.py index 69d0fec4..cf29726f 100644 --- a/hydromt_delft3dfm/workflows/crosssections.py +++ b/hydromt_delft3dfm/workflows/crosssections.py @@ -1,21 +1,16 @@ # -*- coding: utf-8 -*- -import configparser import logging import geopandas as gpd -import hydromt.io import numpy as np import pandas as pd -import shapely -from hydromt import config -from scipy.spatial import distance from shapely.geometry import LineString, Point from .branches import find_nearest_branch # from delft3dfmpy.core import geometry -from .helper import check_gpd_attributes, split_lines +from .helper import check_gpd_attributes logger = logging.getLogger(__name__) @@ -90,7 +85,7 @@ def set_branch_crosssections( ), axis=1, ) - valid_attributes = check_gpd_attributes( + check_gpd_attributes( rectangle_crs, required_columns=[ "branch_id", @@ -118,7 +113,7 @@ def set_branch_crosssections( ), axis=1, ) - valid_attributes = check_gpd_attributes( + check_gpd_attributes( trapezoid_crs, required_columns=[ "branch_id", @@ -189,7 +184,7 @@ def set_branch_crosssections( lambda x: "circ_d{:,.3f}_{:s}".format(x["diameter"], "branch"), axis=1, ) # note diameter is reserved keywords in geopandas - valid_attributes = check_gpd_attributes( + check_gpd_attributes( circle_crs, required_columns=[ "branch_id", @@ -359,7 +354,7 @@ def set_point_crosssections( ): """ Function to set regular cross-sections from point. - only support rectangle, trapezoid, circle and yz + only support rectangle, trapezoid, circle and yz. Parameters ---------- @@ -377,15 +372,14 @@ def set_point_crosssections( gpd.GeoDataFrame The cross sections. 
""" - # check if crs mismatch if crosssections.crs != branches.crs: - logger.error(f"mismatch crs between cross-sections and branches") + logger.error("mismatch crs between cross-sections and branches") # remove duplicated geometries _nodes = crosssections.copy() G = _nodes["geometry"].apply(lambda geom: geom.wkb) - n = len(G) - len(G.drop_duplicates().index) + len(G) - len(G.drop_duplicates().index) crosssections = _nodes[_nodes.index.isin(G.drop_duplicates().index)] # snap to branch @@ -435,7 +429,7 @@ def set_point_crosssections( lambda x: "circ_d{:,.3f}_{:s}".format(x["diameter"], "point"), axis=1, ) - valid_attributes = check_gpd_attributes( + check_gpd_attributes( circle_crs, required_columns=[ "branch_id", @@ -459,7 +453,7 @@ def set_point_crosssections( ), axis=1, ) - valid_attributes = check_gpd_attributes( + check_gpd_attributes( rectangle_crs, required_columns=[ "branch_id", @@ -487,7 +481,7 @@ def set_point_crosssections( ), axis=1, ) - valid_attributes = check_gpd_attributes( + check_gpd_attributes( trapezoid_crs, required_columns=[ "branch_id", @@ -506,7 +500,7 @@ def set_point_crosssections( ) elif shape == "zw": zw_crs = crosssections.loc[crosssections["shape"] == shape, :] - valid_attributes = check_gpd_attributes( + check_gpd_attributes( trapezoid_crs, required_columns=[ "branch_id", @@ -524,7 +518,7 @@ def set_point_crosssections( crosssections_ = pd.concat([crosssections_, _set_zw_crs(zw_crs)]) elif shape == "yz": yz_crs = crosssections.loc[crosssections["shape"] == shape, :] - valid_attributes = check_gpd_attributes( + check_gpd_attributes( trapezoid_crs, required_columns=[ "branch_id", @@ -556,8 +550,7 @@ def set_point_crosssections( def _set_circle_crs(crosssections: gpd.GeoDataFrame): - """circle crossection""" - + """Circle crossection.""" crsdefs = [] crslocs = [] for c in crosssections.itertuples(): @@ -598,8 +591,7 @@ def _set_circle_crs(crosssections: gpd.GeoDataFrame): def _set_rectangle_crs(crosssections: gpd.GeoDataFrame): - """rectangle crossection""" - + """Rectangle crossection.""" crsdefs = [] crslocs = [] for c in crosssections.itertuples(): @@ -641,8 +633,7 @@ def _set_rectangle_crs(crosssections: gpd.GeoDataFrame): def _set_trapezoid_crs(crosssections: gpd.GeoDataFrame): - """trapezoid need to be converted into zw type""" - + """Trapezoid need to be converted into zw type.""" # check for non-valid trapezoid crs if ( (crosssections["width"] <= 0).any() @@ -698,8 +689,7 @@ def _set_trapezoid_crs(crosssections: gpd.GeoDataFrame): def _set_zw_crs(crosssections: gpd.GeoDataFrame): - """set zw profile""" - + """Set zw profile.""" crsdefs = [] crslocs = [] for c in crosssections.itertuples(): @@ -742,8 +732,7 @@ def _set_zw_crs(crosssections: gpd.GeoDataFrame): def _set_yz_crs(crosssections: gpd.GeoDataFrame): - """set yz profile""" - + """Set yz profile.""" crsdefs = [] crslocs = [] for c in crosssections.itertuples(): @@ -893,7 +882,6 @@ def xyzp2xyzl(xyz: pd.DataFrame, sort_by: list = ["x", "y"]): gpd.GeoSeries The xyz lines. 
""" - sort_by = [s.lower() for s in sort_by] if xyz is not None: diff --git a/hydromt_delft3dfm/workflows/dem.py b/hydromt_delft3dfm/workflows/dem.py index 5ca64eaa..c86d04d3 100644 --- a/hydromt_delft3dfm/workflows/dem.py +++ b/hydromt_delft3dfm/workflows/dem.py @@ -7,7 +7,6 @@ import numpy as np import pyflwdir import xarray as xr -from hydromt.flw import d8_from_dem, flwdir_from_da from hydromt.gis_utils import nearest, nearest_merge, spread2d from hydromt.workflows import rivers from scipy import ndimage @@ -27,8 +26,8 @@ def invert_levels_from_dem( Invert levels are computed as DEM - depth - pipe diameter/height. - Parameters: - ----------- + Parameters + ---------- gdf: gpd.GeoDataFrame Pipes gdf. @@ -83,6 +82,7 @@ def get_rivbank_dz( (HAND) values adjecent to river cells. For each feature in `gdf_riv` the nearest river bank cells are identified and the bank heigth is estimated based on a quantile value `q`. + Parameters ---------- gdf_riv : gpd.GeoDataFrame @@ -100,7 +100,7 @@ def get_rivbank_dz( rivbank_dz: np.ndarray riverbank elevations for each segment in `gdf_riv` da_riv_mask, da_bnk_mask: xr.DataArray: - River and river-bank masks + River and river-bank masks. """ # rasterize streams gdf_riv = gdf_riv.copy() @@ -165,6 +165,7 @@ def get_river_bathymetry( """Estimate river bedlevel zb using gradually varying flow (gvf), manning's equation (manning) or a power-law relation (powlaw) rivdph_method. The river is based on flow directions with and minimum upstream area threshold. + Parameters ---------- ds : xr.Dataset @@ -209,7 +210,7 @@ def get_river_bathymetry( gdf_riv: gpd.GeoDataFrame River segments with bed level (zb) estimates da_msk: xr.DataArray: - River mask + River mask. """ raster_kwargs = dict(coords=ds.raster.coords, dims=ds.raster.dims) da_elv = ds[elevtn_name] diff --git a/hydromt_delft3dfm/workflows/graphs.py b/hydromt_delft3dfm/workflows/graphs.py index 9387b752..7709503f 100644 --- a/hydromt_delft3dfm/workflows/graphs.py +++ b/hydromt_delft3dfm/workflows/graphs.py @@ -1,15 +1,9 @@ # -*- coding: utf-8 -*- -import configparser import logging import geopandas as gpd -import hydromt.io import networkx as nx -import numpy as np -import pandas as pd -import shapely -from hydromt import config logger = logging.getLogger(__name__) diff --git a/hydromt_delft3dfm/workflows/helper.py b/hydromt_delft3dfm/workflows/helper.py index 243d4f49..4f85832c 100644 --- a/hydromt_delft3dfm/workflows/helper.py +++ b/hydromt_delft3dfm/workflows/helper.py @@ -5,19 +5,11 @@ import pathlib import geopandas as gpd -import hydromt.io import numpy as np import pandas as pd -import shapely -from hydromt import config -from scipy.spatial import distance from shapely.geometry import ( LineString, - MultiLineString, - MultiPoint, Point, - Polygon, - box, ) from shapely.ops import snap, split @@ -54,7 +46,6 @@ def isfloat(x): bool True if `x` is a float, otherwise False. """ - try: float(x) return True @@ -172,10 +163,9 @@ def slice_geodataframe( gpd.GeoDataFrame The sliced geo data frame. """ - # check data if gdf is None or len(gdf) == 0: - logger.error(f"GeoDataFrame: no slicing is applied. data is None or empty.") + logger.error("GeoDataFrame: no slicing is applied. data is None or empty.") return gdf else: _data = gdf @@ -187,7 +177,7 @@ def slice_geodataframe( # column wise slicing if required_columns is None: logger.debug( - f"GeoDataFrame: no column-wise slicing and retyping applied. required_columns is not specified" + "GeoDataFrame: no column-wise slicing and retyping applied. 
required_columns is not specified" ) data = _data @@ -219,20 +209,19 @@ def slice_geodataframe( logger.error(e) else: logger.debug( - f"GeoDataFrame: no row-wise slicing applied. required_query is not specified." + "GeoDataFrame: no row-wise slicing applied. required_query is not specified." ) if len(data) == 0: - logger.error(f"GeoDataFrame: Zero items are left after slicing.") + logger.error("GeoDataFrame: Zero items are left after slicing.") return data def retype_geodataframe(gdf: gpd.GeoDataFrame, retype=None, logger=logger): """Retype a GeoDataFrame.""" - if retype is None or len(retype) == 0: - logger.debug(f"GeoDataFrame: no retyping is applied. retype is not specified.") + logger.debug("GeoDataFrame: no retyping is applied. retype is not specified.") else: cols = gdf.columns @@ -290,7 +279,7 @@ def eval_funcs(gdf: gpd.GeoDataFrame, funcs: dict, logger=logger): The geo data frame with the updated columns. """ if funcs is None or len(funcs) == 0: - logger.debug(f"GeoDataFrame: no funcs is applied. funcs is not specified.") + logger.debug("GeoDataFrame: no funcs is applied. funcs is not specified.") return gdf for k, v in funcs.items(): @@ -325,7 +314,6 @@ def write_shp(data: gpd.GeoDataFrame, filename: str, columns: list = None): If not specified, all the columns in the dataset will be written. Default to None. """ - if data is not None: # convert to numerical data = data.apply(pd.to_numeric, errors="ignore") @@ -350,7 +338,7 @@ def append_data_columns_based_on_ini_query( keys: list = [], logger=logger, ): - """append key,val pair as data columns for the input GeoDataFrame based on ini [default] or [query] sections + """Append key,val pair as data columns for the input GeoDataFrame based on ini [default] or [query] sections. Parameters ---------- @@ -424,7 +412,6 @@ def check_geodataframe(gdf: gpd.GeoDataFrame): bool True if `gdf` is not None and has at least one entry; otherwise, False. """ - if gdf is None or len(gdf) == 0: check = False logger.warning("GeoDataFrame: do not have valid features. ") @@ -435,7 +422,7 @@ def check_geodataframe(gdf: gpd.GeoDataFrame): ## geometry def cut_pieces(line, distances): - """cut a line into pieces based on distances""" + """Cut a line into pieces based on distances.""" if distances[0] != 0: distances.insert(0, 0) if distances[-1] == line.length: @@ -449,7 +436,8 @@ def cut_pieces(line, distances): def cut(line, distance): """Cuts a line in two at a distance from its starting point - ref: https://shapely.readthedocs.io/en/stable/manual.html""" + ref: https://shapely.readthedocs.io/en/stable/manual.html. + """ if distance <= 0.0 or distance >= line.length: return [LineString(line)] coords = list(line.coords) @@ -511,7 +499,7 @@ def split_lines(line, num_new_lines): def check_gpd_attributes( gdf: gpd.GeoDataFrame, required_columns: list, raise_error: bool = False ): - """check if the geodataframe contains all required columns + """Check if the geodataframe contains all required columns. Parameters ---------- @@ -542,7 +530,7 @@ def update_data_columns_attributes_based_on_filter( filter_value: str = None, ): """ - Add or update columns in the geodataframe based on column and values in attributes dataframe + Add or update columns in the geodataframe based on column and values in attributes dataframe. If filter_column and filter_value is set, only update the attributes of the filtered geodataframe. @@ -598,7 +586,7 @@ def get_gdf_from_branches( branches: gpd.GeoDataFrame, df: pd.DataFrame ) -> gpd.GeoDataFrame: """Get geodataframe from dataframe. 
- Based on interpolation of branches, using columns ["branchid", "chainage" in df] + Based on interpolation of branches, using columns ["branchid", "chainage" in df]. Parameters ---------- diff --git a/hydromt_delft3dfm/workflows/manholes.py b/hydromt_delft3dfm/workflows/manholes.py index b0028326..bf21201d 100644 --- a/hydromt_delft3dfm/workflows/manholes.py +++ b/hydromt_delft3dfm/workflows/manholes.py @@ -157,7 +157,7 @@ def generate_manholes_on_branches( def _update_pipes_from_manholes(manholes: gpd.GeoDataFrame, pipes: gpd.GeoDataFrame): - """assign manholes 'manholeid' to pipes ['manhole_up', 'manhole_dn'] based on geometry""" + """Assign manholes 'manholeid' to pipes ['manhole_up', 'manhole_dn'] based on geometry.""" manholes_dict = { (m.geometry.x, m.geometry.y): manholes.loc[mi, "manholeid"] for mi, m in manholes.iterrows() @@ -182,10 +182,10 @@ def _update_pipes_from_manholes(manholes: gpd.GeoDataFrame, pipes: gpd.GeoDataFr def _get_pipe_stats_for_manholes( manholes: gpd.GeoDataFrame, pipes_col: str, stats_col: str, method: str ): - """get the stats from all pipes connecting a single manholes + """Get the stats from all pipes connecting a single manholes. - parameters - -------------------- + Parameters + ---------- pipes_col: used to identify pipes connected to the manhole (multiple rows of pipes for a single manhole), e.g. BRANCH_ID. stats_col: the column used to obtain the stats, e.g. DIAMETER method: method used to obtain the stats: e.g. max diff --git a/hydromt_delft3dfm/workflows/mesh.py b/hydromt_delft3dfm/workflows/mesh.py index 8a507289..a5c27d19 100644 --- a/hydromt_delft3dfm/workflows/mesh.py +++ b/hydromt_delft3dfm/workflows/mesh.py @@ -4,11 +4,10 @@ from typing import List, Union import numpy as np -from shapely.geometry import LineString, MultiLineString, Polygon, MultiPolygon, box -from shapely.wkt import loads, dumps - from hydrolib.core.dflowfm import Branch, Network from meshkernel import GeometryList +from shapely.geometry import LineString, MultiLineString, MultiPolygon, Polygon, box +from shapely.wkt import dumps, loads logger = logging.getLogger(__name__) @@ -39,9 +38,10 @@ def mesh1d_add_branch( branches (Union[ LineString, MultiLineString, List[Union[LineString, MultiLineString]] ]): Geometry object(s) for which the branch is created node_distance (Union[float, int]): Preferred node distance between branch nodes branch_names (Union[str, list[str]]): Branch names to be used in the mesh1d object - branch_orders (Union[float, int, list[Union[float, int]]]): Branch orders to be used in the mesh1d object + branch_orders (Union[float, int, list[Union[float, int]]]): Branch orders to be used in the mesh1d object. - Returns: + Returns + ------- List[str]: List of names of added branches """ if node_distance == np.inf: @@ -75,6 +75,7 @@ def mesh1d_add_branch( def round_geometry(geometry, rounding_precision: int = 6): """ Round the coordinates of the geometry object to the provided precision. + Parameters ---------- geometry @@ -82,6 +83,7 @@ def round_geometry(geometry, rounding_precision: int = 6): rounding_preicision: int, optional Round coordinates to the specified number of digits. Defaults to 6. + Returns ------- A shapely geometry object. @@ -198,7 +200,8 @@ def links1d2d_add_links_1d_to_2d_include_boundary( within (Union[Polygon, MultiPolygon], optional): Area within which connections are made. Defaults to None. max_length (float, optional): Max edge length. Defaults to None. 
- See also: + See Also + -------- links1d2d_add_links_1d_to_2d """ # Load 1d and 2d in meshkernel @@ -381,7 +384,6 @@ def links1d2d_add_links_2d_to_1d_lateral( within (Union[Polygon, MultiPolygon], optional): Clipping polygon for 2d mesh that is. Defaults to None. max_length (float, optional): Max edge length. Defaults to None. """ - # Load 1d and 2d in meshkernel network._mesh1d._set_mesh1d() network._mesh2d._set_mesh2d() diff --git a/hydromt_delft3dfm/workflows/roughness.py b/hydromt_delft3dfm/workflows/roughness.py index a0b23b39..13f1bb47 100644 --- a/hydromt_delft3dfm/workflows/roughness.py +++ b/hydromt_delft3dfm/workflows/roughness.py @@ -1,17 +1,7 @@ # -*- coding: utf-8 -*- -import configparser import logging -import geopandas as gpd -import hydromt.io -import numpy as np -import pandas as pd -import shapely -from hydromt import config -from scipy.spatial import distance -from shapely.geometry import LineString, Point - logger = logging.getLogger(__name__)
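The workflows/dem.py hunk above keeps the docstring rule "Invert levels are computed as DEM - depth - pipe diameter/height." As a minimal sketch of that rule only (this is not the package's invert_levels_from_dem implementation, and the column names elevtn_up, elevtn_dn, depth, diameter and height are assumed here for illustration):

    import geopandas as gpd


    def invert_levels_sketch(pipes: gpd.GeoDataFrame) -> gpd.GeoDataFrame:
        # Illustrative only: assumes DEM values sampled at both pipe ends
        # plus burial depth and pipe size columns are already present.
        pipes = pipes.copy()
        # circular pipes carry a diameter, rectangular ones a height
        size = pipes["diameter"].fillna(pipes["height"])
        # invert level = ground level (DEM) - burial depth - pipe size
        pipes["invlev_up"] = pipes["elevtn_up"] - pipes["depth"] - size
        pipes["invlev_dn"] = pipes["elevtn_dn"] - pipes["depth"] - size
        return pipes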
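Several hunks in workflows/crosssections.py drop the unused valid_attributes = assignment around check_gpd_attributes, and the workflows/helper.py hunk shows that function's signature (gdf, required_columns, raise_error) and its purpose of checking that a GeoDataFrame contains all required columns. A hedged sketch of what such a check can look like; only the signature is taken from the diff, the body below is not the package's implementation:

    import logging

    import geopandas as gpd

    logger = logging.getLogger(__name__)


    def check_required_columns(
        gdf: gpd.GeoDataFrame, required_columns: list, raise_error: bool = False
    ) -> bool:
        # Report which of the required columns are missing from the GeoDataFrame.
        missing = [c for c in required_columns if c not in gdf.columns]
        if missing:
            msg = f"GeoDataFrame is missing required columns: {missing}"
            if raise_error:
                raise ValueError(msg)
            logger.warning(msg)
            return False
        return True

If the real helper likewise only logs or raises on missing columns, discarding its returned flag at the call sites, as this patch does, leaves behaviour unchanged.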