From 797122da98837a90ece06560dfdebd3f08847349 Mon Sep 17 00:00:00 2001
From: f-PLT
Date: Thu, 22 May 2025 15:04:24 -0400
Subject: [PATCH 01/19] Update dependencies and configs

---
 .make/.bumpversion.toml   |   2 +-
 .make/CHANGES_MAKEFILE.md |   5 +
 .make/Makefile            |   2 +-
 .make/base.make           | 416 +++++++++++++++++++++++++----------
 Makefile.private.example  |   9 +
 noxfile.py                |  60 +++++-
 poetry.lock               |  59 +++++-
 pyproject.toml            |  74 +++++++
 8 files changed, 473 insertions(+), 154 deletions(-)

diff --git a/.make/.bumpversion.toml b/.make/.bumpversion.toml
index 79b246b..b65116a 100644
--- a/.make/.bumpversion.toml
+++ b/.make/.bumpversion.toml
@@ -1,6 +1,6 @@
 [tool.bumpversion]
-current_version = "0.4.0"
+current_version = "0.5.0"
 commit = true
 message = "Makefile version {new_version} released."
 tag = true
diff --git a/.make/CHANGES_MAKEFILE.md b/.make/CHANGES_MAKEFILE.md
index 20bccc5..9770eb8 100644
--- a/.make/CHANGES_MAKEFILE.md
+++ b/.make/CHANGES_MAKEFILE.md
@@ -5,6 +5,11 @@

 [//]: # (New changes here in list form)

+[0.5.0](https://github.com/RolnickLab/lab-advanced-template/tree/makefile-0.5.0) (2025-03-11)
+-------------------------------------------------------------------------------------
+* Add venv support
+* Update and fix `poetry-install-auto` target
+
 [0.4.0](https://github.com/RolnickLab/lab-advanced-template/tree/makefile-0.4.0) (2024-10-30)
 -------------------------------------------------------------------------------------

diff --git a/.make/Makefile b/.make/Makefile
index 08d0974..d4eb4f2 100644
--- a/.make/Makefile
+++ b/.make/Makefile
@@ -10,7 +10,7 @@
 # files to include.
 ########################################################################################
 PROJECT_PATH := $(dir $(abspath $(firstword $(MAKEFILE_LIST))))
-MAKEFILE_VERSION := 0.4.0
+MAKEFILE_VERSION := 0.5.0

 BUMP_TOOL := bump-my-version
 BUMP_CONFIG_FILE := $(PROJECT_PATH).bumpversion.toml
diff --git a/.make/base.make b/.make/base.make
index cd6622d..3f38651 100644
--- a/.make/base.make
+++ b/.make/base.make
@@ -3,7 +3,7 @@
 # If necessary, override the corresponding variable and/or target, or create new ones
 # in one of the following files, depending on the nature of the override :
 #
-# Makefile.variables, Makefile.targets or Makefile.private`,
+# Makefile.variables, Makefile.targets or Makefile.private,
 #
 # The only valid reason to modify this file is to fix a bug or to add new
 # files to include.
@@ -16,7 +16,7 @@ PROJECT_PATH := $(dir $(abspath $(firstword $(MAKEFILE_LIST))))
 MAKEFILE_NAME := $(word $(words $(MAKEFILE_LIST)),$(MAKEFILE_LIST))
 SHELL := /usr/bin/env bash

 BUMP_TOOL := bump-my-version
-MAKEFILE_VERSION := 0.4.0
+MAKEFILE_VERSION := 0.5.0

 DOCKER_COMPOSE ?= docker compose
 AUTO_INSTALL ?=
@@ -26,12 +26,22 @@ CONDA_TOOL := conda
 CONDA_ENVIRONMENT ?=
 CONDA_YES_OPTION ?=

+# Default environment in which to install the package
+# Can be overridden in the Makefile.private file
+DEFAULT_INSTALL_ENV ?=
+DEFAULT_POETRY_INSTALL_ENV ?=
+
 # Colors
 _SECTION := \033[1m\033[34m
 _TARGET := \033[36m
 _NORMAL := \033[0m

 .DEFAULT_GOAL := help
+
+# Import the project-level and private Makefiles so that locally defined
+# variables and targets can override the defaults in this file
+# ('-include' is used so that a missing file does not break the build).
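+#
+# Example (illustrative values only, not part of the template): a
+# Makefile.variables at the project root could override the defaults
+# above with lines such as:
+#
+#   CONDA_TOOL := mamba
+#   AUTO_INSTALL := true
+#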
+-include Makefile.variables +-include Makefile.private ## -- Informative targets ------------------------------------------------------------------------------------------- ## .PHONY: all @@ -70,150 +80,63 @@ targets: help version: ## display current version @echo "version: $(APP_VERSION)" -## -- Conda targets ------------------------------------------------------------------------------------------------- ## +## -- Virtualenv targets -------------------------------------------------------------------------------------------- ## -.PHONY: conda-install -conda-install: ## Install Conda on your local machine - @echo "Looking for [$(CONDA_TOOL)]..."; \ - $(CONDA_TOOL) --version; \ - if [ $$? != "0" ]; then \ - echo " "; \ - echo "Your defined Conda tool [$(CONDA_TOOL)] has not been found."; \ - echo " "; \ - echo "If you know you already have [$(CONDA_TOOL)] or some other Conda tool installed,"; \ - echo "Check your [CONDA_TOOL] variable in the Makefile.private for typos."; \ - echo " "; \ - echo "If your conda tool has not been initiated through your .bashrc file,"; \ - echo "consider using the full path to its executable instead when"; \ - echo "defining your [CONDA_TOOL] variable"; \ - echo " "; \ - echo "If in doubt, don't install Conda and manually create and activate"; \ - echo "your own Python environment."; \ - echo " "; \ - echo -n "Would you like to install Miniconda ? [y/N]: "; \ - read ans; \ - case $$ans in \ - [Yy]*) \ - echo "Fetching and installing miniconda"; \ - echo " "; \ - wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O ~/miniconda.sh; \ - bash ~/miniconda.sh -b -p $${HOME}/.conda; \ - export PATH=$${HOME}/.conda/bin:$$PATH; \ - conda init; \ - /usr/bin/rm ~/miniconda.sh; \ - ;; \ - *) \ - echo "Skipping installation."; \ - echo " "; \ - ;; \ - esac; \ - else \ - echo "Conda tool [$(CONDA_TOOL)] has been found, skipping installation"; \ - fi; +VENV_PATH := $(PROJECT_PATH).venv +VENV_ACTIVATE := $(VENV_PATH)/bin/activate -.PHONY: conda-create-env -conda-create-env: conda-install ## Create a local Conda environment based on `environment.yml` file - @$(CONDA_TOOL) env create $(CONDA_YES_OPTION) -f environment.yml +.PHONY: venv-create +venv-create: ## Create a virtualenv '.venv' at the root of the project folder + @virtualenv $(VENV_PATH) + @make -s venv-activate -.PHONY: conda-env-info -conda-env-info: ## Print information about active Conda environment using - @$(CONDA_TOOL) info +.PHONY: venv-activate +venv-activate: ## Print out the shell command to activate the project's virtualenv. + @echo "source $(VENV_ACTIVATE)" -.PHONY: _conda-poetry-install -_conda-poetry-install: - @$(CONDA_TOOL) run -n $(CONDA_ENVIRONMENT) python --version; \ - if [ $$? != "0" ]; then \ - echo "Target environment doesn't seem to exist..."; \ - if [ "$(AUTO_INSTALL)" = "true" ]; then \ - ans="y";\ - else \ - echo ""; \ - echo -n "Do you want to create it? 
[y/N] "; \ - read ans; \ - fi; \ - case $$ans in \ - [Yy]*) \ - echo "Creating conda environment : [$(CONDA_ENVIRONMENT)]"; \ - make -s conda-create-env; \ - ;; \ - *) \ - echo "Exiting..."; \ - exit 1;\ - ;; \ - esac;\ - fi; - $(CONDA_TOOL) run -n $(CONDA_ENVIRONMENT) $(CONDA_TOOL) install $(CONDA_YES_OPTION) -c conda-forge poetry; \ - CURRENT_VERSION=$$($(CONDA_TOOL) run -n $(CONDA_ENVIRONMENT) poetry --version | awk '{print $$NF}' | tr -d ')'); \ - REQUIRED_VERSION="1.6.0"; \ - if [ "$$(printf '%s\n' "$$REQUIRED_VERSION" "$$CURRENT_VERSION" | sort -V | head -n1)" != "$$REQUIRED_VERSION" ]; then \ - echo "Poetry installed version $$CURRENT_VERSION is less than minimal version $$REQUIRED_VERSION, fixing urllib3 version to prevent problems"; \ - $(CONDA_TOOL) run -n $(CONDA_ENVIRONMENT) poetry add "urllib3<2.0.0"; \ - fi; +## -- Poetry targets ------------------------------------------------------------------------------------------------ ## -.PHONY:conda-poetry-install -conda-poetry-install: ## Install Poetry in the project's Conda environment. Will fail if Conda is not found +.PHONY: poetry-install-auto +poetry-install-auto: ## Install Poetry in Conda environment, or with pipx in a virtualenv if Conda not found @poetry --version; \ if [ $$? != "0" ]; then \ echo "Poetry not found, proceeding to install Poetry..."; \ - echo "Looking for [$(CONDA_TOOL)]...";\ - $(CONDA_TOOL) --version; \ - if [ $$? != "0" ]; then \ - echo "$(CONDA_TOOL) not found; Poetry will not be installed"; \ - else \ - echo "Installing Poetry with Conda in [$(CONDA_ENVIRONMENT)] environment"; \ - make -s _conda-poetry-install; \ - fi; \ - else \ - echo ""; \ - echo "Poetry has been found on this system :"; \ - echo " Install location: $$(which poetry)"; \ - echo ""; \ - if [ "$(AUTO_INSTALL)" = "true" ]; then \ - ans="y";\ - else \ - echo -n "Would you like to install poetry in the project's conda environment anyway ? [y/N]: "; \ - read ans; \ + if [ "$(DEFAULT_POETRY_INSTALL_ENV)" == "conda" ]; then \ + ans_where="conda"; \ + elif [ "$(DEFAULT_POETRY_INSTALL_ENV)" == "venv" ]; then \ + ans_where="venv"; \ + else\ + echo -n "Where would you like to install Poetry, in a dedicated virtualenv (venv), or a conda environment? 
[venv/conda]: "; \
+			read ans_where; \
+		fi; \
+		case $$ans_where in \
+			"venv" | "Venv" |"VENV") \
+				make AUTO_INSTALL=true -s poetry-install-venv; \
+			;; \
+			"conda" | "Conda" | "CONDA") \
+				echo "Installing poetry with Conda"; \
+				make AUTO_INSTALL=true -s conda-poetry-install; \
+			;; \
+			*) \
+				echo ""; \
+				echo -e "\e[1;39;41m-- WARNING --\e[0m Option $$ans_where not found, exiting process."; \
+				echo ""; \
+				exit 1; \
+		esac; \
 	fi;

-.PHONY: conda-poetry-uninstall
-conda-poetry-uninstall: ## Uninstall Poetry located in currently active Conda environment
-	$(CONDA_TOOL) run -n $(CONDA_ENVIRONMENT) $(CONDA_TOOL) remove $(CONDA_YES_OPTION) poetry
-
-.PHONY: conda-clean-env
-conda-clean-env: ## Completely removes local project's Conda environment
-	$(CONDA_TOOL) env remove $(CONDA_YES_OPTION) -n $(CONDA_ENVIRONMENT)
+.PHONY: _pipx_install_poetry
+_pipx_install_poetry:
+	@output="$$(pip install poetry --dry-run)"; \
+	if echo "$$output" | grep -q computecanada ; then \
+		echo ""; \
+		echo -e "\e[1;39;41m-- WARNING --\e[0m Compute Canada (DRAC) environment detected: Installing Poetry < 2.0.0"; \
+		echo ""; \
+		pipx install 'poetry<2.0.0' ; \
+	else \
+		pipx install poetry ; \
+	fi;

-## -- Poetry targets ------------------------------------------------------------------------------------------------ ##
-
-.PHONY: poetry-install-auto
-poetry-install-auto: ## Install Poetry in Conda environment, or with pipx in a virtualenv if Conda not found
-	@poetry --version; \
-	if [ $$? != "0" ]; then \
-		echo "Poetry not found, proceeding to install Poetry..."; \
-		echo "Looking for [$(CONDA_TOOL)]...";\
-		$(CONDA_TOOL) --version; \
-		if [ $$? != "0" ]; then \
-			echo "$(CONDA_TOOL) not found, trying with pipx"; \
-			pipx --version; \
-			if [ $$? != "0" ]; then \
-				make AUTO_INSTALL=true -s poetry-install-venv; \
-			fi; \
-		else \
-			echo "Installing poetry with Conda"; \
-			make AUTO_INSTALL=true -s conda-poetry-install; \
-		fi; \
-	fi;

 .PHONY: poetry-install
 poetry-install: ## Install standalone Poetry using pipx. Will ask where to install pipx.
@@ -267,7 +190,7 @@ poetry-install: ## Install standalone Poetry using pipx. Will ask where to insta
 		esac; \
 	else \
 		echo "Installing Poetry"; \
-		pipx install poetry; \
+		make -s _pipx_install_poetry; \
 	fi; \
 	;; \
 	*) \
@@ -288,8 +211,10 @@ poetry-install-venv: ## Install standalone Poetry. Will install pipx in $HOME/.p
 		source $(PIPX_VENV_PATH)/bin/activate; \
 		pip3 install pipx; \
 		pipx ensurepath; \
+		source $(PIPX_VENV_PATH)/bin/activate && make -s _pipx_install_poetry ; \
+	else \
+		make -s _pipx_install_poetry ; \
 	fi;
-	source $(PIPX_VENV_PATH)/bin/activate && pipx install poetry

 .PHONY: poetry-install-local
 poetry-install-local: ## Install standalone Poetry. Will install pipx with locally available pip.
@@ -300,7 +225,7 @@ poetry-install-local: ## Install standalone Poetry. Will install pipx with local
 		pipx ensurepath; \
 	fi;
 	@echo "Installing Poetry"
-	@pipx install poetry
+	@make -s _pipx_install_poetry


 .PHONY: poetry-env-info
@@ -319,6 +244,10 @@ poetry-create-env: ## Create a Poetry managed environment for the project (Outsi
 	@echo "Use and for more information"
 	@echo""

+.PHONY: poetry-activate
+poetry-activate: ## Print the shell command to activate the project's poetry env.
+	poetry env activate
+
 .PHONY: poetry-remove-env
 poetry-remove-env: ## Remove current project's Poetry managed environment.
 	@if [ "$(AUTO_INSTALL)" = "true" ]; then \
@@ -339,6 +268,10 @@ poetry-remove-env: ## Remove current project's Poetry managed environment.
 		echo "If the active environment listed above is a Conda environment,"; \
 		echo "Choosing to delete it will have no effect; use the target "; \
 		echo""; \
+		echo""; \
+		echo "If the active environment listed above is a venv environment,"; \
+		echo "Choosing to delete it will have no effect; use the bash command $$ rm -rf <path to venv>"; \
+		echo""; \
 		echo -n "Would you like delete the environment listed above? [y/N]: "; \
 		read ans_env; \
 	else \
@@ -370,7 +303,15 @@ poetry-uninstall: poetry-remove-env ## Uninstall pipx-installed Poetry and the c
 	fi; \
 	case $$ans in \
 	[Yy]*) \
-		pipx uninstall poetry; \
+		pipx --version ; \
+		if [ $$? != "0" ]; then \
+			echo "" ; \
+			echo "Pipx not found globally, trying with $(PIPX_VENV_PATH) env" ;\
+			echo "" ; \
+			source $(PIPX_VENV_PATH)/bin/activate && pipx uninstall poetry ; \
+		else \
+			pipx uninstall poetry ; \
+		fi; \
 	;; \
 	*) \
 		echo "Skipping uninstallation."; \
@@ -435,32 +376,217 @@ poetry-uninstall-venv: poetry-remove-env ## Uninstall pipx-installed Poetry, the
 	;; \
 	esac; \

-## -- Install targets (All install targets will install Poetry if not found using `make poetry-install-auto`)-------- ##
+## -- Conda targets ------------------------------------------------------------------------------------------------- ##
+
+.PHONY: conda-install
+conda-install: ## Install Conda on your local machine
+	@echo "Looking for [$(CONDA_TOOL)]..."; \
+	$(CONDA_TOOL) --version; \
+	if [ $$? != "0" ]; then \
+		echo " "; \
+		echo "Your defined Conda tool [$(CONDA_TOOL)] has not been found."; \
+		echo " "; \
+		echo "If you know you already have [$(CONDA_TOOL)] or some other Conda tool installed,"; \
+		echo "Check your [CONDA_TOOL] variable in the Makefile.private for typos."; \
+		echo " "; \
+		echo "If your conda tool has not been initiated through your .bashrc file,"; \
+		echo "consider using the full path to its executable instead when"; \
+		echo "defining your [CONDA_TOOL] variable"; \
+		echo " "; \
+		echo "If in doubt, don't install Conda and manually create and activate"; \
+		echo "your own Python environment."; \
+		echo " "; \
+		echo -n "Would you like to install Miniconda ? [y/N]: "; \
+		read ans; \
+		case $$ans in \
+			[Yy]*) \
+				echo "Fetching and installing miniconda"; \
+				echo " "; \
+				wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O ~/miniconda.sh; \
+				bash ~/miniconda.sh -b -p $${HOME}/.conda; \
+				export PATH=$${HOME}/.conda/bin:$$PATH; \
+				conda init; \
+				/usr/bin/rm ~/miniconda.sh; \
+			;; \
+			*) \
+				echo "Skipping installation."; \
+				echo " "; \
+			;; \
+		esac; \
+	else \
+		echo "Conda tool [$(CONDA_TOOL)] has been found, skipping installation"; \
+	fi;
+
+.PHONY: conda-create-env
+conda-create-env: conda-install ## Create a local Conda environment based on 'environment.yml' file
+	@$(CONDA_TOOL) env create $(CONDA_YES_OPTION) -f environment.yml
+
+.PHONY: conda-env-info
+conda-env-info: ## Print information about the active Conda environment
+	@$(CONDA_TOOL) info
+
+.PHONY: conda-activate
+conda-activate: ## Print the shell command to activate the project's Conda env.
+	@echo "$(CONDA_TOOL) activate $(CONDA_ENVIRONMENT)"
+
+.PHONY: _conda-poetry-install
+_conda-poetry-install:
+	@$(CONDA_TOOL) run -n $(CONDA_ENVIRONMENT) python --version; \
+	if [ $$? 
!= "0" ]; then \
+		echo "Target environment doesn't seem to exist..."; \
+		if [ "$(AUTO_INSTALL)" = "true" ]; then \
+			ans="y";\
+		else \
+			echo ""; \
+			echo -n "Do you want to create it? [y/N] "; \
+			read ans; \
+		fi; \
+		case $$ans in \
+			[Yy]*) \
+				echo "Creating conda environment : [$(CONDA_ENVIRONMENT)]"; \
+				make -s conda-create-env; \
+			;; \
+			*) \
+				echo "Exiting..."; \
+				exit 1;\
+			;; \
+		esac;\
+	fi;
+	$(CONDA_TOOL) run -n $(CONDA_ENVIRONMENT) $(CONDA_TOOL) install $(CONDA_YES_OPTION) -c conda-forge poetry; \
+	CURRENT_VERSION=$$($(CONDA_TOOL) run -n $(CONDA_ENVIRONMENT) poetry --version | awk '{print $$NF}' | tr -d ')'); \
+	REQUIRED_VERSION="1.6.0"; \
+	if [ "$$(printf '%s\n' "$$REQUIRED_VERSION" "$$CURRENT_VERSION" | sort -V | head -n1)" != "$$REQUIRED_VERSION" ]; then \
+		echo "Poetry installed version $$CURRENT_VERSION is less than minimal version $$REQUIRED_VERSION, fixing urllib3 version to prevent problems"; \
+		$(CONDA_TOOL) run -n $(CONDA_ENVIRONMENT) poetry add "urllib3<2.0.0"; \
+	fi;
+
+.PHONY:conda-poetry-install
+conda-poetry-install: ## Install Poetry in the project's Conda environment. Will fail if Conda is not found
+	@poetry --version; \
+	if [ $$? != "0" ]; then \
+		echo "Poetry not found, proceeding to install Poetry..."; \
+		echo "Looking for [$(CONDA_TOOL)]...";\
+		$(CONDA_TOOL) --version; \
+		if [ $$? != "0" ]; then \
+			echo "$(CONDA_TOOL) not found; Poetry will not be installed"; \
+		else \
+			echo "Installing Poetry with Conda in [$(CONDA_ENVIRONMENT)] environment"; \
+			make -s _conda-poetry-install; \
+		fi; \
+	else \
+		echo ""; \
+		echo "Poetry has been found on this system :"; \
+		echo "    Install location: $$(which poetry)"; \
+		echo ""; \
+		if [ "$(AUTO_INSTALL)" = "true" ]; then \
+			ans="y";\
+		else \
+			echo -n "Would you like to install poetry in the project's conda environment anyway ? [y/N]: "; \
+			read ans; \
+		fi; \
+		case $$ans in \
+		[Yy]*) \
+			echo "Installing Poetry with Conda in [$(CONDA_ENVIRONMENT)] environment"; \
+			make -s _conda-poetry-install; \
+		;; \
+		*) \
+			echo "Skipping installation."; \
+			echo " "; \
+		;; \
+		esac; \
+	fi;
+
+.PHONY: conda-poetry-uninstall
+conda-poetry-uninstall: ## Uninstall Poetry located in currently active Conda environment
+	$(CONDA_TOOL) run -n $(CONDA_ENVIRONMENT) $(CONDA_TOOL) remove $(CONDA_YES_OPTION) poetry
+
+.PHONY: conda-clean-env
+conda-clean-env: ## Completely removes local project's Conda environment
+	$(CONDA_TOOL) env remove $(CONDA_YES_OPTION) -n $(CONDA_ENVIRONMENT)
+
+## -- Install targets (All install targets will install Poetry if not found using 'make poetry-install-auto')-------- ##
+
+POETRY_COMMAND := poetry
+
+ifeq ($(DEFAULT_INSTALL_ENV),venv)
+POETRY_COMMAND := source $(VENV_ACTIVATE) && poetry
+else ifeq ($(DEFAULT_INSTALL_ENV),poetry)
+POETRY_COMMAND := poetry
+else ifeq ($(DEFAULT_INSTALL_ENV),conda)
+POETRY_COMMAND := $(CONDA_TOOL) run -n $(CONDA_ENVIRONMENT) poetry
+endif
+
+.PHONY: _check-env
+_check-env:
+	@if ! [ $(DEFAULT_INSTALL_ENV) ]; then \
+		echo -e "\e[1;39;41m-- WARNING --\e[0m No installation environment has been defined." ; \
+		echo "" ; \
+		echo "Defaulting to Poetry managed environment - Poetry will either use the activated environment or '.venv'," ; \
+		echo "if found, or create and manage its own environment if not." ; \
+	elif [ $(DEFAULT_INSTALL_ENV) = "venv" ]; then \
+		if [ ! -f $(VENV_ACTIVATE) ]; then \
+			make -s venv-create ;\
+		fi; \
+	elif [ $(DEFAULT_INSTALL_ENV) = "conda" ]; then \
+		if ! 
$(CONDA_TOOL) env list | grep -q $(CONDA_ENVIRONMENT) ; then \
+			make -s conda-create-env ; \
+		fi; \
+	fi;
+
+.PHONY: _remind-env-activate
+_remind-env-activate:
+	@echo ""
+	@echo "Activate your environment using the following command:"
+	@echo ""
+	@if ! [ $(DEFAULT_INSTALL_ENV) ] || [ $(DEFAULT_INSTALL_ENV) = "poetry" ]; then \
+		make -s poetry-activate ; \
+		echo "" ; \
+		echo "You can also use the eval bash command : eval \$$(make poetry-activate)"; \
+		echo "" ; \
+		echo "The environment can also be used through the 'poetry run <command>' command."; \
+		echo "" ; \
+		echo "    Ex: poetry run python <my_script>"; \
+	elif [ $(DEFAULT_INSTALL_ENV) = "venv" ]; then \
+		make -s venv-activate ; \
+		echo "" ; \
+		echo "You can also use the eval bash command : eval \$$(make venv-activate)"; \
+	elif [ $(DEFAULT_INSTALL_ENV) = "conda" ]; then \
+		make -s conda-activate ; \
+		echo "" ; \
+		echo "You can also use the eval bash command : eval \$$(make conda-activate)"; \
+	fi;
+	@echo ""
+
+test-echo:
+	@echo "use the eval bash command : eval \$$(make poetry-activate)"

 .PHONY: install
 install: install-precommit ## Install the application package, developer dependencies and pre-commit hook

 .PHONY: install-precommit
-install-precommit: install-dev## Install the pre-commit hooks (also installs developer dependencies)
+install-precommit: install-dev ## Install the pre-commit hooks (also installs developer dependencies)
 	@if [ -f .git/hooks/pre-commit ]; then \
 		echo "Pre-commit hook found"; \
 	else \
 		echo "Pre-commit hook not found, proceeding to configure it"; \
-		poetry run pre-commit install; \
+		$(POETRY_COMMAND) run pre-commit install; \
 	fi;

 .PHONY: install-dev
-install-dev: poetry-install-auto ## Install the application along with developer dependencies
-	@poetry install --with dev
+install-dev: poetry-install-auto _check-env ## Install the application along with developer dependencies
+	@$(POETRY_COMMAND) install --with dev
+	@make -s _remind-env-activate

 .PHONY: install-with-lab
-install-with-lab: poetry-install-auto ## Install the application and its dev dependencies, including Jupyter Lab
-	@poetry install --with dev --with lab
+install-with-lab: poetry-install-auto _check-env ## Install the application and its dev dependencies, including Jupyter Lab
+	@$(POETRY_COMMAND) install --with dev --with lab
+	@make -s _remind-env-activate

 .PHONY: install-package
-install-package: poetry-install-auto ## Install the application package only
-	@poetry install
+install-package: poetry-install-auto _check-env ## Install the application package only
+	@$(POETRY_COMMAND) install
+	@make -s _remind-env-activate

 ## -- Versioning targets -------------------------------------------------------------------------------------------- ##

@@ -472,7 +598,7 @@ ifeq ($(filter dry, $(MAKECMDGOALS)), dry)
 endif

 .PHONY: dry
-dry: ## Add the dry target for a preview of changes; ex. `make bump-major dry`
+dry: ## Add the dry target for a preview of changes; ex. 'make bump-major dry'
 	@-echo > /dev/null

 .PHONY: bump-major
diff --git a/Makefile.private.example b/Makefile.private.example
index be83cd0..278f083 100644
--- a/Makefile.private.example
+++ b/Makefile.private.example
@@ -24,5 +24,14 @@ DOCKER_COMPOSE := docker compose
 # a 'true' value will automatically install/remove without asking beforehand.
 AUTO_INSTALL := false

+# The default environment to use. The choices are as follows: [venv, poetry, conda]
+# If this is not set, the makefile will use the `poetry` command without activating
+# an environment beforehand.
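+#
+# Example (illustrative): with 'DEFAULT_INSTALL_ENV := venv', a target such as
+# 'make install-dev' first creates '.venv' if it does not already exist,
+# installs through it, and then prints the matching activation command, which
+# can be applied directly in the current shell with:
+#
+#   eval $(make venv-activate)
+#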
+DEFAULT_INSTALL_ENV := conda
+
+# The default environment where Poetry will be installed. The choices are as follows: [venv, conda]
+# If this is not set, the makefile will ask the user where they want to install Poetry
+DEFAULT_POETRY_INSTALL_ENV := venv
+

 ## -- Private targets ------------------------------------------------------------------------------------------------##

diff --git a/noxfile.py b/noxfile.py
index f5dee8f..035337a 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -1,22 +1,28 @@
+import re
 from pathlib import Path

 import nox

+ARG_RE = re.compile(r"^-[-\w=]+$")  # flag-style arguments only, e.g. "-k" or "--maxfail=1"
+
 nox.options.reuse_existing_virtualenvs = True  # Reuse virtual environments
 nox.options.sessions = ["precommit"]


 def get_paths(session):
     package_path = Path(session.bin).parent.parent.parent
+    main_package = package_path / "geospatial_tools"
+    tests = package_path / "tests"
+    scripts = package_path / "scripts"
     return {
         "all": [
-            package_path / "geospatial_tools",
-            package_path / "tests",
-            package_path / "scripts",
+            main_package,
+            tests,
+            scripts,
         ],
         "module": [
-            package_path / "geospatial_tools",
-            package_path / "scripts",
+            main_package,
+            scripts,
         ],
     }

@@ -80,6 +86,8 @@ def check(session):
 @nox.session()
 def fix(session):
     paths = get_paths(session)
+    session.run("poetry", "run", "autoflake", "-v", *paths["all"], external=True)
+    session.run("poetry", "run", "autopep8", *paths["all"], external=True)
     session.run("poetry", "run", "black", *paths["all"], external=True)
     session.run("poetry", "run", "isort", *paths["all"], external=True)
     session.run("poetry", "run", "flynt", *paths["all"], external=True)
@@ -100,12 +108,30 @@ def precommit(session):
     session.run("poetry", "run", "pre-commit", "run", "--all-files", external=True)


+@nox.session()
+def autoflake(session):
+    paths = get_paths(session)
+    session.run("poetry", "run", "autoflake", "-v", *paths["all"], external=True)
+
+
+@nox.session()
+def autopep(session):
+    paths = get_paths(session)
+    session.run("poetry", "run", "autopep8", *paths["all"], external=True)
+
+
 @nox.session()
 def black(session):
     paths = get_paths(session)
     session.run("poetry", "run", "black", "--check", *paths["all"], external=True)


+@nox.session(name="black-fix")
+def black_fix(session):
+    paths = get_paths(session)
+    session.run("poetry", "run", "black", *paths["all"], external=True)
+
+
 @nox.session()
 def isort(session):
     paths = get_paths(session)
@@ -118,6 +144,24 @@ def flynt(session):
     session.run("poetry", "run", "flynt", *paths["all"], external=True)


+@nox.session(name="ruff-lint")
+def ruff_lint(session):
+    paths = get_paths(session)
+    session.run("poetry", "run", "ruff", "check", *paths["all"], external=True)
+
+
+@nox.session(name="ruff-fix")
+def ruff_fix(session):
+    paths = get_paths(session)
+    session.run("poetry", "run", "ruff", "check", "--fix", *paths["all"], external=True)
+
+
+@nox.session(name="ruff-format")
+def ruff_format(session):
+    paths = get_paths(session)
+    session.run("poetry", "run", "ruff", "format", *paths["all"], external=True)
+
+
 @nox.session()
 def test(session):
     session.run("poetry", "run", "pytest", external=True)
@@ -125,8 +169,12 @@ def test(session):

 @nox.session()
 def test_custom(session):
+    for a in session.posargs:
+        if not ARG_RE.match(a):
+            session.error(f"unsafe pytest argument detected: {a!r}")
+
     session.run(
-        "poetry", "run", "pytest", external=True, *session.posargs
+        "poetry", "run", "python", "-m", "pytest", external=True, *session.posargs
     )  # Pass additional arguments directly to pytest

diff 
--git a/poetry.lock b/poetry.lock index 0ddd3da..5c0cbdc 100644 --- a/poetry.lock +++ b/poetry.lock @@ -225,6 +225,36 @@ docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphi tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] +[[package]] +name = "autoflake" +version = "2.3.1" +description = "Removes unused imports and unused variables" +optional = false +python-versions = ">=3.8" +files = [ + {file = "autoflake-2.3.1-py3-none-any.whl", hash = "sha256:3ae7495db9084b7b32818b4140e6dc4fc280b712fb414f5b8fe57b0a8e85a840"}, + {file = "autoflake-2.3.1.tar.gz", hash = "sha256:c98b75dc5b0a86459c4f01a1d32ac7eb4338ec4317a4469515ff1e687ecd909e"}, +] + +[package.dependencies] +pyflakes = ">=3.0.0" +tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} + +[[package]] +name = "autopep8" +version = "2.3.2" +description = "A tool that automatically formats Python code to conform to the PEP 8 style guide" +optional = false +python-versions = ">=3.9" +files = [ + {file = "autopep8-2.3.2-py2.py3-none-any.whl", hash = "sha256:ce8ad498672c845a0c3de2629c15b635ec2b05ef8177a6e7c91c74f3e9b51128"}, + {file = "autopep8-2.3.2.tar.gz", hash = "sha256:89440a4f969197b69a995e4ce0661b031f455a9f776d2c5ba3dbd83466931758"}, +] + +[package.dependencies] +pycodestyle = ">=2.12.0" +tomli = {version = "*", markers = "python_version < \"3.11\""} + [[package]] name = "babel" version = "2.16.0" @@ -4269,6 +4299,33 @@ files = [ {file = "rpds_py-0.20.0.tar.gz", hash = "sha256:d72a210824facfdaf8768cf2d7ca25a042c30320b3020de2fa04640920d4e121"}, ] +[[package]] +name = "ruff" +version = "0.11.10" +description = "An extremely fast Python linter and code formatter, written in Rust." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.11.10-py3-none-linux_armv6l.whl", hash = "sha256:859a7bfa7bc8888abbea31ef8a2b411714e6a80f0d173c2a82f9041ed6b50f58"}, + {file = "ruff-0.11.10-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:968220a57e09ea5e4fd48ed1c646419961a0570727c7e069842edd018ee8afed"}, + {file = "ruff-0.11.10-py3-none-macosx_11_0_arm64.whl", hash = "sha256:1067245bad978e7aa7b22f67113ecc6eb241dca0d9b696144256c3a879663bca"}, + {file = "ruff-0.11.10-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4854fd09c7aed5b1590e996a81aeff0c9ff51378b084eb5a0b9cd9518e6cff2"}, + {file = "ruff-0.11.10-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8b4564e9f99168c0f9195a0fd5fa5928004b33b377137f978055e40008a082c5"}, + {file = "ruff-0.11.10-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b6a9cc5b62c03cc1fea0044ed8576379dbaf751d5503d718c973d5418483641"}, + {file = "ruff-0.11.10-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:607ecbb6f03e44c9e0a93aedacb17b4eb4f3563d00e8b474298a201622677947"}, + {file = "ruff-0.11.10-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7b3a522fa389402cd2137df9ddefe848f727250535c70dafa840badffb56b7a4"}, + {file = "ruff-0.11.10-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f071b0deed7e9245d5820dac235cbdd4ef99d7b12ff04c330a241ad3534319f"}, + {file = "ruff-0.11.10-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a60e3a0a617eafba1f2e4186d827759d65348fa53708ca547e384db28406a0b"}, + {file = "ruff-0.11.10-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:da8ec977eaa4b7bf75470fb575bea2cb41a0e07c7ea9d5a0a97d13dbca697bf2"}, + {file = "ruff-0.11.10-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:ddf8967e08227d1bd95cc0851ef80d2ad9c7c0c5aab1eba31db49cf0a7b99523"}, + {file = "ruff-0.11.10-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5a94acf798a82db188f6f36575d80609072b032105d114b0f98661e1679c9125"}, + {file = "ruff-0.11.10-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:3afead355f1d16d95630df28d4ba17fb2cb9c8dfac8d21ced14984121f639bad"}, + {file = "ruff-0.11.10-py3-none-win32.whl", hash = "sha256:dc061a98d32a97211af7e7f3fa1d4ca2fcf919fb96c28f39551f35fc55bdbc19"}, + {file = "ruff-0.11.10-py3-none-win_amd64.whl", hash = "sha256:5cc725fbb4d25b0f185cb42df07ab6b76c4489b4bfb740a175f3a59c70e8a224"}, + {file = "ruff-0.11.10-py3-none-win_arm64.whl", hash = "sha256:ef69637b35fb8b210743926778d0e45e1bffa850a7c61e428c6b971549b5f5d1"}, + {file = "ruff-0.11.10.tar.gz", hash = "sha256:d522fb204b4959909ecac47da02830daec102eeb100fb50ea9554818d47a5fa6"}, +] + [[package]] name = "scooby" version = "0.10.0" @@ -4913,4 +4970,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = ">=3.10,<3.12" -content-hash = "d1e1443c0d29d8cf6c156197f7c1f7991d093beaf1a8e88e9a9d7036f081184f" +content-hash = "956680935c77616946350cc66cac1d94dac69e8e9745a9471e882f48d8004aca" diff --git a/pyproject.toml b/pyproject.toml index afd6154..4836852 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -42,6 +42,9 @@ docformatter = {extras = ["tomli"], version = "^1.7.5"} nbval = "^0.11.0" black = "^24.8.0" nox = "^2024.4.15" +autoflake = "^2.3.1" +autopep8 = "^2.3.2" +ruff = "^0.11.10" [tool.poetry.group.lab.dependencies] jupyterlab = "^4.0.10" @@ -118,6 +121,20 @@ exclude = [ "__pycache__" ] +[tool.autoflake] +remove-all-unused-imports = true +in-place = true +ignore-init-module-imports = true 
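+# Illustrative note: autoflake reads this [tool.autoflake] section from
+# pyproject.toml, so a plain 'autoflake <path>' run should pick these options
+# up; each key mirrors the autoflake command-line flag of the same name.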
+remove-unused-variables = true
+recursive = true
+
+[tool.autopep8]
+max_line_length = 120
+in-place = true
+aggressive = 2
+ignore = ["W503", "E203", "E501"]
+recursive = true
+
 [tool.black]
 line-length = 120
 target-version = ["py311"]
@@ -138,3 +155,60 @@ wrap-summaries = 120
 blank = false
 exclude = []
 recursive = true
+
+[tool.ruff]
+line-length = 120
+
+target-version = "py311"
+
+exclude = [
+    ".git",
+    "migrations",
+    "__pycache__",
+]
+
+[tool.ruff.lint]
+select = [
+    "A", # Flake8 Built ins
+    "E", # Error (Flake8)
+    "F", # Pyflakes (Flake8)
+    "W", # Warning (Flake8)
+    "I", # isort (import sorting)
+    "N", # Naming conventions (Pylint, etc.)
+    "C90", # mccabe complexity (replaces flake8 --max-complexity and mccabe)
+    "B", # Bugbear (common linting issues)
+    "UP", # pyupgrade (suggests modern Python syntax)
+    "PLR", # Pylint refactor
+    "PLE", # Pylint error (specific Pylint error rules)
+    "PLW", # Pylint warning (specific Pylint warning rules)
+    "PLC", # Pylint convention (specific Pylint convention rules)
+    "R", # Refactor (Pylint refactoring suggestions)
+    "TID", # TODO comments
+    "FAST",# FastAPI
+    "C4", # List and dict comprehensions
+    "DJ", # Django
+    "PIE", # Returns and unnecessary returns
+    "Q", # Double quotes
+    "RET", # Fix return statements
+    "PTH", # Enforce pathlib
+    "ARG", # Unused argument
+    "FLY", # Flynt
+    "NPY", # Numpy specific
+    "PD", # Pandas specific
+    "RUF", # Ruff specific
+]
+
+ignore = [
+    "E203", # whitespace before ':', Black already handles this
+    "E266", # too many leading '#' for comments
+    "E501", # line too long (we enforce via line-length instead)
+    "RET504",
+    "RUF013"
+]
+
+[tool.ruff.lint.pydocstyle]
+convention = "numpy" # Corresponds to flake8's docstring-convention and docformatter style.
+
+[tool.ruff.lint.mccabe]
+# cyclomatic complexity
+max-complexity = 18

From 3a6e1200e7dc937b362c98a9cc9ec068f60ee63e Mon Sep 17 00:00:00 2001
From: f-PLT
Date: Thu, 22 May 2025 15:05:24 -0400
Subject: [PATCH 02/19] Update dependencies and configs

---
 pyproject.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index 4836852..ca56900 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -183,7 +183,7 @@ select = [
     "PLW", # Pylint warning (specific Pylint warning rules)
     "PLC", # Pylint convention (specific Pylint convention rules)
     "R", # Refactor (Pylint refactoring suggestions)
-    "TID", # TODO comments
+    "TID", # TO DO comments
     "FAST",# FastAPI
     "C4", # List and dict comprehensions
     "DJ", # Django

From d861194c88e8fba5982b8962585f2aaf84f0039e Mon Sep 17 00:00:00 2001
From: f-PLT
Date: Thu, 22 May 2025 15:14:21 -0400
Subject: [PATCH 03/19] Ruff fix lint

---
 .pre-commit-config.yaml                       |   2 +-
 geospatial_tools/geotools_types.py            |   4 +-
 .../planetary_computer/sentinel_2.py          |  21 +-
 geospatial_tools/raster.py                    |  41 +-
 geospatial_tools/stac.py                      |  93 ++-
 geospatial_tools/utils.py                     |  15 +-
 geospatial_tools/vector.py                    |  13 +-
 poetry.lock                                   |  65 ++-
 pyproject.toml                                |   2 +-
 .../download_and_process.py                   |   4 +-
 .../test_planetary_computer_sentinel2.ipynb   | 545 +++++++++---------
 .../test_notebooks/test_stac_api_tools.ipynb  | 115 ++--
 tests/test_utils.py                           |   4 +-
 tests/test_vector.py                          |   2 +-
 14 files changed, 464 insertions(+), 462 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 3971ae8..eafade3 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -19,7 +19,7 @@ repos:
         files: '^(?!data/usa_polygon_5070\.gpkg$)(?!data/s2_grid_usa_polygon_5070\.gpkg$).*$'

   - repo: https://github.com/psf/black
-    
rev: 23.12.1 + rev: 24.8.0 hooks: - id: black diff --git a/geospatial_tools/geotools_types.py b/geospatial_tools/geotools_types.py index 5ea9d0e..0c8c303 100644 --- a/geospatial_tools/geotools_types.py +++ b/geospatial_tools/geotools_types.py @@ -1,6 +1,6 @@ """This module contains constants and functions pertaining to data types.""" -from typing import Tuple, Union +from typing import Union from shapely.geometry import ( GeometryCollection, @@ -12,5 +12,5 @@ Polygon, ) -BBoxLike = Tuple[float, float, float, float] +BBoxLike = tuple[float, float, float, float] IntersectsLike = Union[Point, Polygon, LineString, MultiPolygon, MultiPoint, MultiLineString, GeometryCollection] diff --git a/geospatial_tools/planetary_computer/sentinel_2.py b/geospatial_tools/planetary_computer/sentinel_2.py index 46ebffb..9fef5f7 100644 --- a/geospatial_tools/planetary_computer/sentinel_2.py +++ b/geospatial_tools/planetary_computer/sentinel_2.py @@ -2,7 +2,6 @@ import logging import pathlib from concurrent.futures import ThreadPoolExecutor, as_completed -from typing import Optional, Union from geopandas import GeoDataFrame @@ -35,7 +34,7 @@ def __init__( sentinel2_tiling_grid_column: str, vector_features: GeoDataFrame, vector_features_column: str, - date_ranges: Optional[list[str]] = None, + date_ranges: list[str] | None = None, max_cloud_cover: int = 5, max_no_data_value: int = 5, logger: logging.Logger = LOGGER, @@ -127,7 +126,7 @@ def create_date_ranges(self, start_year: int, end_year: int, start_month: int, e ) return self.date_ranges - def find_best_complete_products(self, max_cloud_cover: Optional[int] = None, max_no_data_value: int = 5) -> dict: + def find_best_complete_products(self, max_cloud_cover: int | None = None, max_no_data_value: int = 5) -> dict: """ Finds the best complete products for each Sentinel 2 tiles. This function will filter out all products that have more than 5% of nodata values. 
@@ -193,7 +192,7 @@ def select_best_products_per_feature(self) -> GeoDataFrame: self.vector_features_with_products = spatial_join_results return self.vector_features_with_products - def to_file(self, output_dir: Union[str, pathlib.Path]) -> None: + def to_file(self, output_dir: str | pathlib.Path) -> None: write_results_to_file( cloud_cover=self.max_cloud_cover, successful_results=self.successful_results, @@ -208,7 +207,7 @@ def sentinel_2_complete_tile_search( date_ranges: list[str], max_cloud_cover: int, max_no_data_value: int = 5, -) -> Optional[tuple[int, str, Optional[float], Optional[float]]]: +) -> tuple[int, str, float | None, float | None] | None: client = StacSearch(PLANETARY_COMPUTER) collection = "sentinel-2-l2a" tile_ids = [tile_id] @@ -280,7 +279,7 @@ def find_best_product_per_s2_tile( def _get_best_product_id_for_each_grid_tile( s2_tile_search_results: dict, feature_s2_tiles: GeoDataFrame, logger: logging.Logger = LOGGER -) -> Optional[str]: +) -> str | None: search_result_keys = s2_tile_search_results.keys() all_keys_present = all(item in search_result_keys for item in feature_s2_tiles) if not all_keys_present: @@ -321,9 +320,9 @@ def write_best_product_ids_to_dataframe( def write_results_to_file( cloud_cover: int, successful_results: dict, - incomplete_results: Optional[list] = None, - error_results: Optional[list] = None, - output_dir: Union[str, pathlib.Path] = DATA_DIR, + incomplete_results: list | None = None, + error_results: list | None = None, + output_dir: str | pathlib.Path = DATA_DIR, logger: logging.Logger = LOGGER, ) -> dict: tile_filename = output_dir / f"data_lt{cloud_cover}cc.json" @@ -358,8 +357,8 @@ def download_and_process_sentinel2_asset( product_id: str, product_bands: list[str], collections: str = "sentinel-2-l2a", - target_projection: Optional[Union[int, str]] = None, - base_directory: Union[str, pathlib.Path] = DATA_DIR, + target_projection: int | str | None = None, + base_directory: str | pathlib.Path = DATA_DIR, delete_intermediate_files: bool = False, logger: logging.Logger = LOGGER, ) -> Asset: diff --git a/geospatial_tools/raster.py b/geospatial_tools/raster.py index f7b44c7..9f4cd12 100644 --- a/geospatial_tools/raster.py +++ b/geospatial_tools/raster.py @@ -6,7 +6,6 @@ import time from concurrent.futures import ProcessPoolExecutor from multiprocessing import cpu_count -from typing import Optional, Union import geopandas as gpd import rasterio @@ -21,11 +20,11 @@ def reproject_raster( - dataset_path: Union[str, pathlib.Path], - target_crs: Union[str, int], - target_path: Union[str, pathlib.Path], + dataset_path: str | pathlib.Path, + target_crs: str | int, + target_path: str | pathlib.Path, logger: logging.Logger = LOGGER, -) -> Union[pathlib.Path, None]: +) -> pathlib.Path | None: """ Parameters @@ -76,12 +75,12 @@ def reproject_raster( def _clip_process( - raster_image: Union[pathlib.Path, str], + raster_image: pathlib.Path | str, id_polygon: tuple[int, GeoDataFrame], - base_output_filename: Optional[str], - output_dir: Union[pathlib.Path, str], + base_output_filename: str | None, + output_dir: pathlib.Path | str, logger: logging.Logger = LOGGER, -) -> Union[tuple[int, GeoDataFrame, pathlib.Path], None]: +) -> tuple[int, GeoDataFrame, pathlib.Path] | None: """ Parameters @@ -132,17 +131,17 @@ def _clip_process( else: logger.warning( f"There was an error writing the file :[Polygon ID: {polygon_id}" - f"\nPolygon: {polygon}\nError message: {str(e)}]" + f"\nPolygon: {polygon}\nError message: {e!s}]" ) return None def clip_raster_with_polygon( - 
raster_image: Union[pathlib.Path, str], - polygon_layer: Union[pathlib.Path, str, GeoDataFrame], - base_output_filename: Optional[str] = None, - output_dir: Union[str, pathlib.Path] = DATA_DIR, - num_of_workers: Optional[int] = None, + raster_image: pathlib.Path | str, + polygon_layer: pathlib.Path | str | GeoDataFrame, + base_output_filename: str | None = None, + output_dir: str | pathlib.Path = DATA_DIR, + num_of_workers: int | None = None, logger: logging.Logger = LOGGER, ) -> list[pathlib.Path]: """ @@ -195,7 +194,7 @@ def clip_raster_with_polygon( polygons = gdf["geometry"] ids = gdf.index - id_polygon_list = zip(ids, polygons) + id_polygon_list = zip(ids, polygons, strict=False) logger.info(f"Clipping raster image with {len(polygons)} polygons") with ProcessPoolExecutor(max_workers=workers) as executor: futures = [ @@ -219,7 +218,7 @@ def clip_raster_with_polygon( return path_list -def get_total_band_count(raster_file_list: list[Union[pathlib.Path, str]], logger: logging.Logger = LOGGER) -> int: +def get_total_band_count(raster_file_list: list[pathlib.Path | str], logger: logging.Logger = LOGGER) -> int: """ Parameters @@ -244,7 +243,7 @@ def get_total_band_count(raster_file_list: list[Union[pathlib.Path, str]], logge def create_merged_raster_bands_metadata( - raster_file_list: list[Union[pathlib.Path, str]], logger: logging.Logger = LOGGER + raster_file_list: list[pathlib.Path | str], logger: logging.Logger = LOGGER ) -> dict: """ @@ -266,12 +265,12 @@ def create_merged_raster_bands_metadata( def merge_raster_bands( - raster_file_list: list[Union[pathlib.Path, str]], - merged_filename: Union[pathlib.Path, str], + raster_file_list: list[pathlib.Path | str], + merged_filename: pathlib.Path | str, merged_band_names: list[str] = None, merged_metadata: dict = None, logger: logging.Logger = LOGGER, -) -> Optional[pathlib.Path]: +) -> pathlib.Path | None: """ This function aims to combine multiple overlapping raster bands into a single raster image. diff --git a/geospatial_tools/stac.py b/geospatial_tools/stac.py index 3a820cc..75c77c3 100644 --- a/geospatial_tools/stac.py +++ b/geospatial_tools/stac.py @@ -3,7 +3,6 @@ import logging import pathlib import time -from typing import Optional, Union import pystac import pystac_client @@ -30,7 +29,7 @@ PLANETARY_COMPUTER_API = "https://planetarycomputer.microsoft.com/api/stac/v1" -def create_planetary_computer_catalog(max_retries=3, delay=5, logger=LOGGER) -> Union[pystac_client.Client, None]: +def create_planetary_computer_catalog(max_retries=3, delay=5, logger=LOGGER) -> pystac_client.Client | None: """ Creates a Planetary Computer Catalog Client. 
@@ -53,7 +52,7 @@ def create_planetary_computer_catalog(max_retries=3, delay=5, logger=LOGGER) -> return None -def catalog_generator(catalog_name, logger=LOGGER) -> Optional[pystac_client.Client]: +def catalog_generator(catalog_name, logger=LOGGER) -> pystac_client.Client | None: catalog_dict = {PLANETARY_COMPUTER: create_planetary_computer_catalog} if catalog_name not in catalog_dict: logger.error(f"Unsupported catalog name: {catalog_name}") @@ -70,7 +69,7 @@ def list_available_catalogs(logger: logging.Logger = LOGGER) -> frozenset[str]: class AssetSubItem: - def __init__(self, asset, item_id: str, band: str, filename: Union[str, pathlib.Path]): + def __init__(self, asset, item_id: str, band: str, filename: str | pathlib.Path): if isinstance(filename, str): filename = pathlib.Path(filename) self.asset = asset @@ -83,10 +82,10 @@ class Asset: def __init__( self, asset_id: str, - bands: Optional[list[str]] = None, - asset_item_list: Optional[list[AssetSubItem]] = None, - merged_asset_path: Optional[Union[str, pathlib.Path]] = None, - reprojected_asset: Optional[Union[str, pathlib.Path]] = None, + bands: list[str] | None = None, + asset_item_list: list[AssetSubItem] | None = None, + merged_asset_path: str | pathlib.Path | None = None, + reprojected_asset: str | pathlib.Path | None = None, logger: logging.Logger = LOGGER, ): self.asset_id = asset_id @@ -110,8 +109,8 @@ def show_asset_items(self): self.logger.info(f"Asset list for asset [{self.asset_id}] : \n\t{asset_list}") def merge_asset( - self, base_directory: Optional[Union[str, pathlib.Path]] = None, delete_sub_items: bool = False - ) -> Union[pathlib.Path, None]: + self, base_directory: str | pathlib.Path | None = None, delete_sub_items: bool = False + ) -> pathlib.Path | None: if not base_directory: base_directory = "" if isinstance(base_directory, str): @@ -142,8 +141,8 @@ def merge_asset( def reproject_merged_asset( self, - target_projection: Union[str, int], - base_directory: Union[str, pathlib.Path] = None, + target_projection: str | int, + base_directory: str | pathlib.Path = None, delete_merged_asset: bool = False, ): if not base_directory: @@ -199,25 +198,25 @@ class StacSearch: def __init__(self, catalog_name, logger=LOGGER): self.catalog: pystac_client.Client = catalog_generator(catalog_name=catalog_name) - self.search_results: Optional[list[pystac.Item]] = None - self.cloud_cover_sorted_results: Optional[list[pystac.Item]] = None - self.filtered_results: Optional[list[pystac.Item]] = None - self.downloaded_search_assets: Optional[list[Asset]] = None - self.downloaded_cloud_cover_sorted_assets: Optional[list[Asset]] = None + self.search_results: list[pystac.Item] | None = None + self.cloud_cover_sorted_results: list[pystac.Item] | None = None + self.filtered_results: list[pystac.Item] | None = None + self.downloaded_search_assets: list[Asset] | None = None + self.downloaded_cloud_cover_sorted_assets: list[Asset] | None = None self.downloaded_best_sorted_asset = None self.logger = logger def search( self, date_range=None, - max_items: Optional[int] = None, - limit: Optional[int] = None, - ids: Optional[list] = None, - collections: Optional[Union[str, list]] = None, - bbox: Optional[geotools_types.BBoxLike] = None, - intersects: Optional[geotools_types.IntersectsLike] = None, - query: Optional[dict] = None, - sortby: Optional[Union[list, dict]] = None, + max_items: int | None = None, + limit: int | None = None, + ids: list | None = None, + collections: str | list | None = None, + bbox: geotools_types.BBoxLike | None = None, + 
intersects: geotools_types.IntersectsLike | None = None, + query: dict | None = None, + sortby: list | dict | None = None, max_retries=3, delay=5, ) -> list: @@ -312,13 +311,13 @@ def search( def search_for_date_ranges( self, date_ranges: list[str], - max_items: Optional[int] = None, - limit: Optional[int] = None, - collections: Optional[Union[str, list]] = None, - bbox: Optional[geotools_types.BBoxLike] = None, - intersects: Optional[geotools_types.IntersectsLike] = None, - query: Optional[dict] = None, - sortby: Optional[Union[list, dict]] = None, + max_items: int | None = None, + limit: int | None = None, + collections: str | list | None = None, + bbox: geotools_types.BBoxLike | None = None, + intersects: geotools_types.IntersectsLike | None = None, + query: dict | None = None, + sortby: list | dict | None = None, max_retries=3, delay=5, ) -> list: @@ -401,14 +400,14 @@ def search_for_date_ranges( def _base_catalog_search( self, date_range: str, - max_items: Optional[int] = None, - limit: Optional[int] = None, - ids: Optional[list] = None, - collections: Optional[Union[str, list]] = None, - bbox: Optional[geotools_types.BBoxLike] = None, - intersects: Optional[geotools_types.IntersectsLike] = None, - query: Optional[dict] = None, - sortby: Optional[Union[list, dict]] = None, + max_items: int | None = None, + limit: int | None = None, + ids: list | None = None, + collections: str | list | None = None, + bbox: geotools_types.BBoxLike | None = None, + intersects: geotools_types.IntersectsLike | None = None, + query: dict | None = None, + sortby: list | dict | None = None, ): search = self.catalog.search( datetime=date_range, @@ -431,7 +430,7 @@ def _base_catalog_search( log_state(log_msg) return list(items) - def sort_results_by_cloud_coverage(self) -> Optional[list]: + def sort_results_by_cloud_coverage(self) -> list | None: """ Sort results by cloud coverage. @@ -449,7 +448,7 @@ def sort_results_by_cloud_coverage(self) -> Optional[list]: self.logger.warning("No results found: please run a search before trying to sort results") return None - def filter_no_data(self, property_name: str, max_no_data_value: int = 5) -> Optional[list[pystac.Item]]: + def filter_no_data(self, property_name: str, max_no_data_value: int = 5) -> list[pystac.Item] | None: """ Filter results and sorted results that are above a nodata value threshold. 
@@ -511,7 +510,7 @@ def _download_assets(self, item: pystac.Item, bands: list, base_directory: pathl return downloaded_files def _download_results( - self, results: Optional[list[pystac.Item]], bands: list, base_directory: Union[str, pathlib.Path] + self, results: list[pystac.Item] | None, bands: list, base_directory: str | pathlib.Path ) -> list[Asset]: if not results: return [] @@ -527,7 +526,7 @@ def _download_results( downloaded_search_results.append(downloaded_item) return downloaded_search_results - def download_search_results(self, bands: list, base_directory: Union[str, pathlib.Path]) -> list[Asset]: + def download_search_results(self, bands: list, base_directory: str | pathlib.Path) -> list[Asset]: """ Parameters @@ -561,7 +560,7 @@ def _generate_best_results(self): return results def download_sorted_by_cloud_cover_search_results( - self, bands: list, base_directory: Union[str, pathlib.Path], first_x_num_of_items: Optional[int] = None + self, bands: list, base_directory: str | pathlib.Path, first_x_num_of_items: int | None = None ) -> list[Asset]: """ @@ -589,9 +588,7 @@ def download_sorted_by_cloud_cover_search_results( self.downloaded_cloud_cover_sorted_assets = downloaded_search_results return downloaded_search_results - def download_best_cloud_cover_result( - self, bands: list, base_directory: Union[str, pathlib.Path] - ) -> Optional[Asset]: + def download_best_cloud_cover_result(self, bands: list, base_directory: str | pathlib.Path) -> Asset | None: """ Parameters diff --git a/geospatial_tools/utils.py b/geospatial_tools/utils.py index ac7b2a8..1b95200 100644 --- a/geospatial_tools/utils.py +++ b/geospatial_tools/utils.py @@ -8,7 +8,6 @@ import pathlib import sys import zipfile -from typing import Optional, Union import requests import yaml @@ -90,7 +89,7 @@ def get_yaml_config(yaml_config_file: str, logger: logging.Logger = LOGGER) -> d for path in potential_paths: if path.exists(): config_filepath = path - logger.info(f"Yaml config file [{str(path)}] found.") + logger.info(f"Yaml config file [{path!s}] found.") break params = {} @@ -137,7 +136,7 @@ def get_json_config(json_config_file: str, logger=LOGGER) -> dict: for path in potential_paths: if path.exists(): config_filepath = path - logger.info(f"JSON config file [{str(path)}] found.") + logger.info(f"JSON config file [{path!s}] found.") break if not config_filepath: @@ -153,7 +152,7 @@ def get_json_config(json_config_file: str, logger=LOGGER) -> dict: return {} -def create_crs(dataset_crs: Union[str, int], logger=LOGGER): +def create_crs(dataset_crs: str | int, logger=LOGGER): """ Parameters @@ -185,9 +184,7 @@ def create_crs(dataset_crs: Union[str, int], logger=LOGGER): logger.error(f"Encountered problem while trying to format EPSG code from input : [{dataset_crs}]") -def download_url( - url: str, filename: Union[str, pathlib.Path], overwrite: bool = False, logger=LOGGER -) -> Optional[pathlib.Path]: +def download_url(url: str, filename: str | pathlib.Path, overwrite: bool = False, logger=LOGGER) -> pathlib.Path | None: """ This function downloads a file from a given URL. @@ -223,9 +220,7 @@ def download_url( return None -def unzip_file( - zip_path: Union[str, pathlib.Path], extract_to: Union[str, pathlib.Path], logger: logging.Logger = LOGGER -): +def unzip_file(zip_path: str | pathlib.Path, extract_to: str | pathlib.Path, logger: logging.Logger = LOGGER): """ This function unzips an archive to a specific directory. 
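For reference, a minimal usage sketch of the two utils.py helpers shown in the hunks above (the URL and file names are hypothetical, for illustration only); download_url returns the downloaded file's path, or None when nothing was downloaded, so the result is checked before unzipping:

    from geospatial_tools.utils import download_url, unzip_file

    # Hypothetical URL and destination paths, for illustration only.
    archive = download_url("https://example.com/tiles.zip", filename="tiles.zip")
    if archive is not None:
        unzip_file(archive, extract_to="tiles/")
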
diff --git a/geospatial_tools/vector.py b/geospatial_tools/vector.py index f2083d9..a386bc0 100644 --- a/geospatial_tools/vector.py +++ b/geospatial_tools/vector.py @@ -6,7 +6,6 @@ from concurrent.futures import ProcessPoolExecutor from multiprocessing import cpu_count from pathlib import Path -from typing import Union import dask_geopandas as dgpd import geopandas as gpd @@ -22,7 +21,7 @@ def create_grid_coordinates( - bounding_box: Union[list, tuple], grid_size: float, logger: logging.Logger = LOGGER + bounding_box: list | tuple, grid_size: float, logger: logging.Logger = LOGGER ) -> tuple[ndarray, ndarray]: """ Create grid coordinates based on input bounding box and grid size. @@ -90,7 +89,7 @@ def _create_polygons_from_coords_chunk(chunk: tuple[ndarray, ndarray, float]) -> """ lon_coords, lat_coords, grid_size = chunk polygons = [] - for lon, lat in zip(lon_coords, lat_coords): + for lon, lat in zip(lon_coords, lat_coords, strict=False): polygons.append( Polygon([(lon, lat), (lon + grid_size, lat), (lon + grid_size, lat + grid_size), (lon, lat + grid_size)]) ) @@ -98,7 +97,7 @@ def _create_polygons_from_coords_chunk(chunk: tuple[ndarray, ndarray, float]) -> def create_vector_grid( - bounding_box: Union[list, tuple], grid_size: float, crs: str = None, logger: logging.Logger = LOGGER + bounding_box: list | tuple, grid_size: float, crs: str = None, logger: logging.Logger = LOGGER ) -> GeoDataFrame: """ Create a grid of polygons within the specified bounds and cell size. This function uses NumPy vectorized arrays for @@ -142,9 +141,9 @@ def create_vector_grid( def create_vector_grid_parallel( - bounding_box: Union[list, tuple], + bounding_box: list | tuple, grid_size: float, - crs: Union[str, int] = None, + crs: str | int = None, num_of_workers: int = None, logger: logging.Logger = LOGGER, ) -> GeoDataFrame: @@ -338,7 +337,7 @@ def select_polygons_by_location( return filtered_result_gdf -def to_geopackage(gdf: GeoDataFrame, filename: Union[str, Path], logger=LOGGER) -> str: +def to_geopackage(gdf: GeoDataFrame, filename: str | Path, logger=LOGGER) -> str: """ Save GeoDataFrame to a Geopackage file. diff --git a/poetry.lock b/poetry.lock index 5c0cbdc..eea98d5 100644 --- a/poetry.lock +++ b/poetry.lock @@ -292,47 +292,49 @@ lxml = ["lxml"] [[package]] name = "black" -version = "24.8.0" +version = "25.1.0" description = "The uncompromising code formatter." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "black-24.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:09cdeb74d494ec023ded657f7092ba518e8cf78fa8386155e4a03fdcc44679e6"}, - {file = "black-24.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:81c6742da39f33b08e791da38410f32e27d632260e599df7245cccee2064afeb"}, - {file = "black-24.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:707a1ca89221bc8a1a64fb5e15ef39cd755633daa672a9db7498d1c19de66a42"}, - {file = "black-24.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:d6417535d99c37cee4091a2f24eb2b6d5ec42b144d50f1f2e436d9fe1916fe1a"}, - {file = "black-24.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fb6e2c0b86bbd43dee042e48059c9ad7830abd5c94b0bc518c0eeec57c3eddc1"}, - {file = "black-24.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:837fd281f1908d0076844bc2b801ad2d369c78c45cf800cad7b61686051041af"}, - {file = "black-24.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:62e8730977f0b77998029da7971fa896ceefa2c4c4933fcd593fa599ecbf97a4"}, - {file = "black-24.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:72901b4913cbac8972ad911dc4098d5753704d1f3c56e44ae8dce99eecb0e3af"}, - {file = "black-24.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7c046c1d1eeb7aea9335da62472481d3bbf3fd986e093cffd35f4385c94ae368"}, - {file = "black-24.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:649f6d84ccbae73ab767e206772cc2d7a393a001070a4c814a546afd0d423aed"}, - {file = "black-24.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2b59b250fdba5f9a9cd9d0ece6e6d993d91ce877d121d161e4698af3eb9c1018"}, - {file = "black-24.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:6e55d30d44bed36593c3163b9bc63bf58b3b30e4611e4d88a0c3c239930ed5b2"}, - {file = "black-24.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:505289f17ceda596658ae81b61ebbe2d9b25aa78067035184ed0a9d855d18afd"}, - {file = "black-24.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b19c9ad992c7883ad84c9b22aaa73562a16b819c1d8db7a1a1a49fb7ec13c7d2"}, - {file = "black-24.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1f13f7f386f86f8121d76599114bb8c17b69d962137fc70efe56137727c7047e"}, - {file = "black-24.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:f490dbd59680d809ca31efdae20e634f3fae27fba3ce0ba3208333b713bc3920"}, - {file = "black-24.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:eab4dd44ce80dea27dc69db40dab62d4ca96112f87996bca68cd75639aeb2e4c"}, - {file = "black-24.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3c4285573d4897a7610054af5a890bde7c65cb466040c5f0c8b732812d7f0e5e"}, - {file = "black-24.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e84e33b37be070ba135176c123ae52a51f82306def9f7d063ee302ecab2cf47"}, - {file = "black-24.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:73bbf84ed136e45d451a260c6b73ed674652f90a2b3211d6a35e78054563a9bb"}, - {file = "black-24.8.0-py3-none-any.whl", hash = "sha256:972085c618ee94f402da1af548a4f218c754ea7e5dc70acb168bfaca4c2542ed"}, - {file = "black-24.8.0.tar.gz", hash = "sha256:2500945420b6784c38b9ee885af039f5e7471ef284ab03fa35ecdde4688cd83f"}, + {file = "black-25.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:759e7ec1e050a15f89b770cefbf91ebee8917aac5c20483bc2d80a6c3a04df32"}, + {file = 
"black-25.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e519ecf93120f34243e6b0054db49c00a35f84f195d5bce7e9f5cfc578fc2da"}, + {file = "black-25.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:055e59b198df7ac0b7efca5ad7ff2516bca343276c466be72eb04a3bcc1f82d7"}, + {file = "black-25.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:db8ea9917d6f8fc62abd90d944920d95e73c83a5ee3383493e35d271aca872e9"}, + {file = "black-25.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a39337598244de4bae26475f77dda852ea00a93bd4c728e09eacd827ec929df0"}, + {file = "black-25.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:96c1c7cd856bba8e20094e36e0f948718dc688dba4a9d78c3adde52b9e6c2299"}, + {file = "black-25.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bce2e264d59c91e52d8000d507eb20a9aca4a778731a08cfff7e5ac4a4bb7096"}, + {file = "black-25.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:172b1dbff09f86ce6f4eb8edf9dede08b1fce58ba194c87d7a4f1a5aa2f5b3c2"}, + {file = "black-25.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4b60580e829091e6f9238c848ea6750efed72140b91b048770b64e74fe04908b"}, + {file = "black-25.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e2978f6df243b155ef5fa7e558a43037c3079093ed5d10fd84c43900f2d8ecc"}, + {file = "black-25.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b48735872ec535027d979e8dcb20bf4f70b5ac75a8ea99f127c106a7d7aba9f"}, + {file = "black-25.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:ea0213189960bda9cf99be5b8c8ce66bb054af5e9e861249cd23471bd7b0b3ba"}, + {file = "black-25.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8f0b18a02996a836cc9c9c78e5babec10930862827b1b724ddfe98ccf2f2fe4f"}, + {file = "black-25.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:afebb7098bfbc70037a053b91ae8437c3857482d3a690fefc03e9ff7aa9a5fd3"}, + {file = "black-25.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:030b9759066a4ee5e5aca28c3c77f9c64789cdd4de8ac1df642c40b708be6171"}, + {file = "black-25.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:a22f402b410566e2d1c950708c77ebf5ebd5d0d88a6a2e87c86d9fb48afa0d18"}, + {file = "black-25.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1ee0a0c330f7b5130ce0caed9936a904793576ef4d2b98c40835d6a65afa6a0"}, + {file = "black-25.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3df5f1bf91d36002b0a75389ca8663510cf0531cca8aa5c1ef695b46d98655f"}, + {file = "black-25.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9e6827d563a2c820772b32ce8a42828dc6790f095f441beef18f96aa6f8294e"}, + {file = "black-25.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:bacabb307dca5ebaf9c118d2d2f6903da0d62c9faa82bd21a33eecc319559355"}, + {file = "black-25.1.0-py3-none-any.whl", hash = "sha256:95e8176dae143ba9097f351d174fdaf0ccd29efb414b362ae3fd72bf0f710717"}, + {file = "black-25.1.0.tar.gz", hash = "sha256:33496d5cd1222ad73391352b4ae8da15253c5de89b93a80b3e2c8d9a19ec2666"}, ] [package.dependencies] click = ">=8.0.0" +ipython = {version = ">=7.8.0", optional = true, markers = "extra == \"jupyter\""} mypy-extensions = ">=0.4.3" packaging = ">=22.0" pathspec = ">=0.9.0" platformdirs = ">=2" +tokenize-rt = {version = ">=3.2.0", optional = true, markers = "extra == \"jupyter\""} tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} typing-extensions = {version = 
">=4.0.1", markers = "python_version < \"3.11\""} [package.extras] colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] +d = ["aiohttp (>=3.10)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] @@ -4591,6 +4593,17 @@ webencodings = ">=0.4" doc = ["sphinx", "sphinx_rtd_theme"] test = ["pytest", "ruff"] +[[package]] +name = "tokenize-rt" +version = "6.1.0" +description = "A wrapper around the stdlib `tokenize` which roundtrips." +optional = false +python-versions = ">=3.9" +files = [ + {file = "tokenize_rt-6.1.0-py2.py3-none-any.whl", hash = "sha256:d706141cdec4aa5f358945abe36b911b8cbdc844545da99e811250c0cee9b6fc"}, + {file = "tokenize_rt-6.1.0.tar.gz", hash = "sha256:e8ee836616c0877ab7c7b54776d2fefcc3bde714449a206762425ae114b53c86"}, +] + [[package]] name = "tomli" version = "2.0.1" @@ -4970,4 +4983,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = ">=3.10,<3.12" -content-hash = "956680935c77616946350cc66cac1d94dac69e8e9745a9471e882f48d8004aca" +content-hash = "74f183b89189fcd2a6eb408d828ee795ca2546fb311ad460ffbc3863c37786e0" diff --git a/pyproject.toml b/pyproject.toml index ca56900..b9c03f7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -40,7 +40,7 @@ pre-commit = "^3.7.0" flake8-pyproject = "^1.2.3" docformatter = {extras = ["tomli"], version = "^1.7.5"} nbval = "^0.11.0" -black = "^24.8.0" +black = {extras = ["jupyter"], version = "^25.1.0"} nox = "^2024.4.15" autoflake = "^2.3.1" autopep8 = "^2.3.2" diff --git a/scripts/sentinel_2_search_and_process/download_and_process.py b/scripts/sentinel_2_search_and_process/download_and_process.py index 5bbd757..448f1a2 100644 --- a/scripts/sentinel_2_search_and_process/download_and_process.py +++ b/scripts/sentinel_2_search_and_process/download_and_process.py @@ -29,7 +29,7 @@ def _handle_product_list(product_list: str) -> list: parsed_product_list = [] if product_list.endswith(".txt") and pathlib.Path(product_list).exists(): - with open(product_list, "r", encoding="utf-8") as f: + with open(product_list, encoding="utf-8") as f: for line in f: parsed_product_list.append(line.strip()) if not product_list.endswith(".txt"): @@ -87,7 +87,7 @@ def download_and_process( LOGGER.info(f"Will download and process the following products: {parsed_product_list}") if not parsed_product_list: LOGGER.error("Error - Product list not found!") - return None + return LOGGER.info(f"Loading best results file {best_products_file}") best_results = gpd.read_file(best_products_file) diff --git a/tests/test_notebooks/test_planetary_computer_sentinel2.ipynb b/tests/test_notebooks/test_planetary_computer_sentinel2.ipynb index b113487..1242506 100644 --- a/tests/test_notebooks/test_planetary_computer_sentinel2.ipynb +++ b/tests/test_notebooks/test_planetary_computer_sentinel2.ipynb @@ -2,6 +2,7 @@ "cells": [ { "cell_type": "code", + "execution_count": 1, "id": "26590ffa", "metadata": { "ExecuteTime": { @@ -9,22 +10,23 @@ "start_time": "2025-05-22T18:12:04.226800Z" } }, + "outputs": [], "source": [ "import shutil\n", "\n", - "import leafmap\n", "import geopandas as gpd\n", + "import leafmap\n", + "\n", "from geospatial_tools import DATA_DIR, TESTS_DIR\n", + "from geospatial_tools.planetary_computer.sentinel_2 import BestProductsForFeatures, download_and_process_sentinel2_asset\n", "from geospatial_tools.raster import clip_raster_with_polygon\n", "from geospatial_tools.stac import Asset\n", - "from geospatial_tools.planetary_computer.sentinel_2 import 
BestProductsForFeatures, download_and_process_sentinel2_asset\n", - "from geospatial_tools.vector import create_vector_grid_parallel, to_geopackage, select_polygons_by_location" - ], - "outputs": [], - "execution_count": 1 + "from geospatial_tools.vector import create_vector_grid_parallel, select_polygons_by_location, to_geopackage" + ] }, { "cell_type": "code", + "execution_count": 2, "id": "bbf8d1aede2a4fd2", "metadata": { "ExecuteTime": { @@ -32,12 +34,11 @@ "start_time": "2025-05-22T18:12:05.677670Z" } }, + "outputs": [], "source": [ "TEST_TMP_DIR = TESTS_DIR / \"test_notebooks/tmp_sentinel2\"\n", "TEST_TMP_DIR.mkdir(exist_ok=True)" - ], - "outputs": [], - "execution_count": 2 + ] }, { "cell_type": "markdown", @@ -62,6 +63,7 @@ }, { "cell_type": "code", + "execution_count": 3, "id": "fcb67d1eb21aafc1", "metadata": { "ExecuteTime": { @@ -69,17 +71,17 @@ "start_time": "2025-05-22T18:12:05.727177Z" } }, + "outputs": [], "source": [ "USA_POLYGON_FILE = DATA_DIR / \"usa_polygon_5070.gpkg\"\n", "S2_USA_GRID_FILE = DATA_DIR / \"s2_grid_usa_polygon_5070.gpkg\"\n", "usa_polygon = gpd.read_file(USA_POLYGON_FILE)\n", "s2_grid = gpd.read_file(S2_USA_GRID_FILE)" - ], - "outputs": [], - "execution_count": 3 + ] }, { "cell_type": "code", + "execution_count": 4, "id": "314e7c068786ad4d", "metadata": { "ExecuteTime": { @@ -87,19 +89,9 @@ "start_time": "2025-05-22T18:12:05.780131Z" } }, - "source": [ - "usa_polygon" - ], "outputs": [ { "data": { - "text/plain": [ - " AFFGEOID GEOID NAME \\\n", - "0 0100000US US United States \n", - "\n", - " geometry \n", - "0 MULTIPOLYGON (((-2116048.733 3142966.552, -211... " - ], "text/html": [ "
\n", "