Skip to content

Commit 05f23e3

Browse files
MAINT: update pre-commit, ruff rules (#207)
<!--pre-commit.ci start--> updates: - [github.com/astral-sh/ruff-pre-commit: v0.12.8 → v0.14.6](astral-sh/ruff-pre-commit@v0.12.8...v0.14.6) - [github.com/pre-commit/mirrors-mypy: v1.17.1 → v1.18.2](pre-commit/mirrors-mypy@v1.17.1...v1.18.2) <!--pre-commit.ci end--> Also: + update ruff rules + fix ruff issues --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Pieter Roggemans <pieter.roggemans@gmail.com>
1 parent d37ced1 commit 05f23e3

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

42 files changed

+399
-503
lines changed

.pre-commit-config.yaml

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -4,13 +4,13 @@ ci:
44

55
repos:
66
- repo: https://github.com/astral-sh/ruff-pre-commit
7-
rev: "v0.12.8"
7+
rev: "v0.14.6"
88
hooks:
99
- id: ruff-format
1010
- id: ruff
11-
# args: [ --fix ]
11+
args: [ --fix ]
1212
- repo: https://github.com/pre-commit/mirrors-mypy
13-
rev: "v1.17.1"
13+
rev: "v1.18.2"
1414
hooks:
1515
- id: mypy
1616
additional_dependencies: [pandas-stubs<2.3, types-python-dateutil, types-requests]

cropclassification/calc_cover.py

Lines changed: 14 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -23,8 +23,8 @@
2323
def run_cover(
2424
config_paths: list[Path],
2525
default_basedir: Path,
26-
config_overrules: list[str] = [],
27-
):
26+
config_overrules: list[str] | None = None,
27+
) -> None:
2828
"""Run the cover marker using the setting in the config_paths.
2929
3030
Args:
@@ -43,7 +43,7 @@ def run_cover(
4343
# Main initialisation of the logging
4444
log_level = conf.general.get("log_level")
4545
log_dir = conf.paths.getpath("log_dir")
46-
global logger
46+
global logger # noqa: PLW0603
4747
logger = log_helper.main_log_init(log_dir, __name__, log_level)
4848

4949
logger.warning("This is a POC for a cover marker, so not for operational use!")
@@ -274,7 +274,7 @@ def run_cover(
274274
logging.shutdown()
275275

276276

277-
def _categorize_pred(x):
277+
def _categorize_pred(x: float | str) -> str:
278278
if pd.isna(x):
279279
return "NODATA"
280280
try:
@@ -290,16 +290,16 @@ def _categorize_pred(x):
290290

291291

292292
def _calc_cover(
293-
input_parcel_path,
294-
timeseries_periodic_dir,
295-
images_to_use,
293+
input_parcel_path: Path,
294+
timeseries_periodic_dir: Path,
295+
images_to_use: dict[str, conf.ImageConfig],
296296
start_date: datetime,
297297
end_date: datetime,
298298
parcel_columns: list[str] | None,
299299
output_path: Path,
300300
output_geo_path: Path | None = None,
301301
force: bool = False,
302-
):
302+
) -> None:
303303
logger.info(f"start processing {output_path}")
304304

305305
if output_path.exists():
@@ -490,7 +490,9 @@ def _calc_cover(
490490
shutil.rmtree(tmp_dir)
491491

492492

493-
def _select_parcels_BMG_MEG_MEV_EEF(input_geo_path, output_geo_path):
493+
def _select_parcels_BMG_MEG_MEV_EEF(
494+
input_geo_path: Path, output_geo_path: Path
495+
) -> None:
494496
"""Select parcels based on the cover marker."""
495497
# Select the relevant parcels based on the cover marker
496498
info = gfo.get_layerinfo(input_geo_path)
@@ -539,7 +541,7 @@ def _select_parcels_BMG_MEG_MEV_EEF(input_geo_path, output_geo_path):
539541
gfo.copy_layer(input_geo_path, output_geo_path, where=where)
540542

541543

542-
def _select_parcels_EEF(input_geo_path, output_geo_path):
544+
def _select_parcels_EEF(input_geo_path: Path, output_geo_path: Path) -> None:
543545
"""Select parcels based on the cover marker."""
544546
# Select the relevant parcels based on the cover marker
545547
info = gfo.get_layerinfo(input_geo_path)
@@ -566,7 +568,7 @@ def _select_parcels_EEF(input_geo_path, output_geo_path):
566568
gfo.copy_layer(input_geo_path, output_geo_path, where=where)
567569

568570

569-
def _select_parcels(input_geo_path, output_geo_path):
571+
def _select_parcels(input_geo_path: Path, output_geo_path: Path) -> None:
570572
"""Select parcels based on the cover marker."""
571573
# Select the relevant parcels based on the cover marker
572574
info = gfo.get_layerinfo(input_geo_path)
@@ -593,7 +595,7 @@ def _select_parcels(input_geo_path, output_geo_path):
593595
gfo.copy_layer(input_geo_path, output_geo_path, where=where)
594596

595597

596-
def report():
598+
def report() -> None:
597599
"""Create a report for the cover marker."""
598600
# Read parcels selected to be controlled for the cover marker
599601
prc_selectie_path = Path(

cropclassification/calc_cropclass.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -23,8 +23,8 @@
2323
def run_cropclass(
2424
config_paths: list[Path],
2525
default_basedir: Path,
26-
config_overrules: list[str] = [],
27-
):
26+
config_overrules: list[str] | None = None,
27+
) -> None:
2828
"""Runs a crop classification marker using the setting in the config_paths.
2929
3030
Args:
@@ -80,7 +80,7 @@ def run_cropclass(
8080
logger.info(
8181
"Write new config_used.ini, because some parameters might have been added"
8282
)
83-
with open(config_used_path, "w") as config_used_file:
83+
with config_used_path.open("w") as config_used_file:
8484
conf.config.write(config_used_file)
8585
else:
8686
# Copy the config files to a config dir for later notice
@@ -101,7 +101,7 @@ def run_cropclass(
101101

102102
# Write the resolved complete config, so it can be reused
103103
logger.info("Write config_used.ini, so it can be reused later on")
104-
with open(config_used_path, "w") as config_used_file:
104+
with config_used_path.open("w") as config_used_file:
105105
conf.config.write(config_used_file)
106106

107107
# Read the info about the run

cropclassification/calc_periodic_mosaic.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@
88
from cropclassification.util import mosaic_util
99

1010

11-
def calc_periodic_mosaic_task(config_paths: list[Path], default_basedir: Path):
11+
def calc_periodic_mosaic_task(config_paths: list[Path], default_basedir: Path) -> None:
1212
"""Runs a periodic mosaic using the setting in the config_paths.
1313
1414
Args:

cropclassification/helpers/config_helper.py

Lines changed: 33 additions & 30 deletions
Original file line numberDiff line numberDiff line change
@@ -39,7 +39,7 @@ def __init__(
3939
imageprofile_name: str,
4040
imageprofile: ImageProfile | None = None,
4141
bands: list[str] | None = None,
42-
):
42+
) -> None:
4343
"""Constructor for ImageConfig.
4444
4545
Args:
@@ -62,9 +62,9 @@ def __init__(
6262
def read_config(
6363
config_paths: list[Path] | Path | None,
6464
default_basedir: Path | None = None,
65-
overrules: list[str] = [],
65+
overrules: list[str] | None = None,
6666
preload_defaults: bool = True,
67-
):
67+
) -> None:
6868
"""Read cropclassification configuration file(s).
6969
7070
Args:
@@ -102,10 +102,13 @@ def read_config(
102102
if not config_path.exists():
103103
raise ValueError(f"Config file doesn't exist: {config_path}")
104104

105+
if overrules is None:
106+
overrules = []
107+
105108
# If there are overrules, write them to a temporary configuration file.
106-
global config_overrules
109+
global config_overrules # noqa: PLW0603
107110
config_overrules = overrules
108-
global config_overrules_path
111+
global config_overrules_path # noqa: PLW0603
109112
config_overrules_path = None
110113
if len(config_overrules) > 0:
111114
tmp_dir = Path(tempfile.gettempdir())
@@ -131,12 +134,12 @@ def read_config(
131134
overrules_parser[section][parameter] = value
132135

133136
# Write to temp file and add file to config_paths
134-
with open(config_overrules_path, "w") as overrules_file:
137+
with config_overrules_path.open("w") as overrules_file:
135138
overrules_parser.write(overrules_file)
136139
config_paths.append(config_overrules_path)
137140

138141
# Read and parse the config files
139-
global config
142+
global config # noqa: PLW0603
140143
config = configparser.ConfigParser(
141144
interpolation=configparser.ExtendedInterpolation(),
142145
converters={
@@ -195,40 +198,40 @@ def read_config(
195198
tmp_dir=tmp_dir_str
196199
)
197200

198-
global config_paths_used
201+
global config_paths_used # noqa: PLW0603
199202
config_paths_used = config_paths
200203

201204
# Now set global variables to each section as shortcuts
202-
global general
205+
global general # noqa: PLW0603
203206
general = config["general"]
204-
global calc_timeseries_params
207+
global calc_timeseries_params # noqa: PLW0603
205208
calc_timeseries_params = config["calc_timeseries_params"]
206-
global calc_marker_params
209+
global calc_marker_params # noqa: PLW0603
207210
calc_marker_params = config["calc_marker_params"]
208-
global calc_periodic_mosaic_params
211+
global calc_periodic_mosaic_params # noqa: PLW0603
209212
if "calc_periodic_mosaic_params" in config:
210213
calc_periodic_mosaic_params = config["calc_periodic_mosaic_params"]
211214
else:
212215
calc_periodic_mosaic_params = None
213-
global roi
216+
global roi # noqa: PLW0603
214217
roi = config["roi"]
215-
global period
218+
global period # noqa: PLW0603
216219
period = config["period"]
217-
global images
220+
global images # noqa: PLW0603
218221
images = config["images"]
219-
global marker
222+
global marker # noqa: PLW0603
220223
marker = config["marker"]
221-
global timeseries
224+
global timeseries # noqa: PLW0603
222225
timeseries = config["timeseries"]
223-
global preprocess
226+
global preprocess # noqa: PLW0603
224227
preprocess = config["preprocess"]
225-
global classifier
228+
global classifier # noqa: PLW0603
226229
classifier = config["classifier"]
227-
global postprocess
230+
global postprocess # noqa: PLW0603
228231
postprocess = config["postprocess"]
229-
global columns
232+
global columns # noqa: PLW0603
230233
columns = config["columns"]
231-
global paths
234+
global paths # noqa: PLW0603
232235
paths = config["paths"]
233236

234237
# Check some parameters that should be overriden to have a valid config
@@ -239,7 +242,7 @@ def read_config(
239242
raise ValueError("paths.images_periodic_dir must be overridden")
240243

241244
# Load image profiles
242-
global image_profiles
245+
global image_profiles # noqa: PLW0603
243246
image_profiles_config_filepath = paths.getpath("image_profiles_config_filepath")
244247
if image_profiles_config_filepath is not None:
245248
image_profiles = _get_image_profiles(
@@ -250,12 +253,12 @@ def read_config(
250253
image_profiles = {}
251254

252255

253-
def parse_image_config(input) -> dict[str, ImageConfig]:
256+
def parse_image_config(image_config: str) -> dict[str, ImageConfig]:
254257
"""Parses the json input to a dictionary of ImageConfig objects."""
255258
result = None
256259
imageconfig_parsed = None
257260
try:
258-
imageconfig_parsed = json.loads(input)
261+
imageconfig_parsed = json.loads(image_config)
259262
except Exception:
260263
pass
261264

@@ -281,7 +284,7 @@ def parse_image_config(input) -> dict[str, ImageConfig]:
281284
)
282285
else:
283286
# It was no json object, so it must be a list
284-
result = {i.strip(): ImageConfig(i.strip()) for i in input.split(",")}
287+
result = {i.strip(): ImageConfig(i.strip()) for i in image_config.split(",")}
285288

286289
return result
287290

@@ -330,7 +333,7 @@ def _get_image_profiles(image_profiles_path: Path) -> dict[str, ImageProfile]:
330333
return profiles
331334

332335

333-
def _validate_image_profiles(profiles: dict[str, ImageProfile]):
336+
def _validate_image_profiles(profiles: dict[str, ImageProfile]) -> None:
334337
# Check that all base_image_profile s are actually existing image profiles.
335338
for profile in list(profiles):
336339
base_image_profile = profiles[profile].base_imageprofile
@@ -340,7 +343,7 @@ def _validate_image_profiles(profiles: dict[str, ImageProfile]):
340343
)
341344

342345

343-
def pformat_config():
346+
def pformat_config() -> str:
344347
"""Formats the config as a pretty string."""
345348
message = (
346349
f"Config files used: {pprint.pformat(config_paths_used)} \n"
@@ -351,13 +354,13 @@ def pformat_config():
351354
return message
352355

353356

354-
def as_dict():
357+
def as_dict() -> dict[str, Any]:
355358
"""Converts the config objects into a dictionary.
356359
357360
The resulting dictionary has sections as keys which point to a dict of the
358361
sections options as key => value pairs.
359362
"""
360-
the_dict = {}
363+
the_dict: dict[str, dict[str, Any]] = {}
361364
for section in config.sections():
362365
the_dict[section] = {}
363366
for key, val in config.items(section):

cropclassification/helpers/log_helper.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,6 @@
22

33
import datetime
44
import logging
5-
import os
65
from pathlib import Path
76

87

@@ -21,7 +20,7 @@ def main_log_init(
2120
"""
2221
# Make sure the log dir exists
2322
if not log_dir.exists():
24-
os.makedirs(log_dir, exist_ok=True)
23+
log_dir.mkdir(parents=True, exist_ok=True)
2524

2625
# Get root logger
2726
logger = logging.getLogger("")

cropclassification/helpers/model_helper.py

Lines changed: 5 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,6 @@
11
"""Module with helper functions regarding (keras) models."""
22

3-
import glob
43
import logging
5-
import os
64
from pathlib import Path
75

86
import pandas as pd
@@ -163,11 +161,9 @@ def get_models(model_dir: Path, model_base_filename: str | None = None) -> pd.Da
163161
"""
164162
# glob search string
165163
if model_base_filename is not None:
166-
model_weight_paths = glob.glob(
167-
f"{model_dir!s}{os.sep}{model_base_filename}_*.hdf5"
168-
)
164+
model_weight_paths = list(model_dir.glob(f"{model_base_filename}_*.hdf5"))
169165
else:
170-
model_weight_paths = glob.glob(f"{model_dir!s}{os.sep}*.hdf5")
166+
model_weight_paths = list(model_dir.glob("*.hdf5"))
171167

172168
# Loop through all models and extract necessary info...
173169
model_info_list = []
@@ -224,15 +220,15 @@ def save_and_clean_models(
224220
model_save_dir: Path,
225221
model_save_base_filename: str,
226222
acc_metric_mode: str,
227-
new_model=None,
223+
new_model=None, # noqa: ANN001
228224
new_model_acc_train: float | None = None,
229225
new_model_acc_val: float | None = None,
230226
new_model_epoch: int | None = None,
231227
save_weights_only: bool = False,
232228
verbose: bool = True,
233229
debug: bool = False,
234230
only_report: bool = False,
235-
):
231+
) -> None:
236232
"""Save the new model if it is good enough.
237233
238234
Existing models are removed if they are worse than the new or other existing models.
@@ -337,7 +333,7 @@ def save_and_clean_models(
337333
logger.debug(f"DELETE {model_info['filename']}")
338334
elif model_info["path"].exists():
339335
logger.debug(f"DELETE {model_info['filename']}")
340-
os.remove(model_info["path"])
336+
model_info["path"].unlink()
341337

342338
if debug:
343339
print(f"Better one(s) found for{model_info['filename']}:")

0 commit comments

Comments
 (0)