diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index aa383614cc..8672681212 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -13,7 +13,8 @@ files: | setup\.py| docs\/.+\.py| lib\/.+\.py| - benchmarks\/.+\.py + benchmarks\/.+\.py| + tools\/.+\.py ) minimum_pre_commit_version: 1.21.0 diff --git a/docs/src/developers_guide/release.rst b/docs/src/developers_guide/release.rst index 2dcbd03ea1..43e648ff80 100644 --- a/docs/src/developers_guide/release.rst +++ b/docs/src/developers_guide/release.rst @@ -88,6 +88,11 @@ New features shall not be included in a patch release, these are for bug fixes. A patch release does not require a release candidate, but the rest of the release process is to be followed. +As mentioned in :ref:`release_branch`: branch/commit management is much simpler +if the patch changes are **first merged into the release branch** - +e.g. ``v1.9.x`` - and are only added to ``main`` during :ref:`merge_back` (post +release). + Before Release -------------- @@ -111,6 +116,8 @@ from the `latest CF standard names`_. The Release ----------- +.. _release_branch: + Release Branch ~~~~~~~~~~~~~~ @@ -193,6 +200,8 @@ of the new release. Ideally this would be updated before the release, but the DOI for the new version is only available once the release has been created in GitHub. +.. 
_merge_back: + Merge Back ~~~~~~~~~~ diff --git a/noxfile.py b/noxfile.py index 4d733fa2d4..2d4f78b35c 100644 --- a/noxfile.py +++ b/noxfile.py @@ -180,12 +180,14 @@ def tests(session: nox.sessions.Session): """ prepare_venv(session) session.install("--no-deps", "--editable", ".") + session.install("git+https://github.com/SciTools-incubator/nothing.git") session.env.update(ENV) run_args = [ "pytest", "-n", "auto", "lib/iris/tests", + "tools", ] if "-c" in session.posargs or "--coverage" in session.posargs: run_args[-1:-1] = ["--cov=lib/iris", "--cov-report=xml"] diff --git a/pyproject.toml b/pyproject.toml index 88ae1c3bcc..95124f5710 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -65,13 +65,13 @@ extend-exclude = [ "_ff_cross_references.py", "um_cf_map.py", "docs/src/sphinxext/api_rst_formatting.py", - "tools", ] line-length = 88 src = [ "benchmarks", "lib", "docs/src", + "tools", ] [tool.ruff.format] @@ -108,9 +108,16 @@ known-first-party = ["iris"] "lib/iris/tests/*.py" = [ # https://docs.astral.sh/ruff/rules/undocumented-public-module/ "D100", # Missing docstring in public module + "D106", # Missing docstring in public nested class "D205", # 1 blank line required between summary line and description "D401", # 1 First line of docstring should be in imperative mood ] +"tools/test_*.py" = [ + "D100", + "D106", + "D205", + "D401", +] # Deprecated unittest tests diff --git a/tools/generate_std_names.py b/tools/generate_std_names.py index 1e491ad5f7..996d55483b 100644 --- a/tools/generate_std_names.py +++ b/tools/generate_std_names.py @@ -2,9 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -A script to convert the standard names information from the provided XML -file into a Python dictionary format. +"""Script converting standard names information from provided XML file to a Python dict. 
Takes two arguments: the first is the XML file to process and the second is the name of the file to write the Python dictionary file into. @@ -19,10 +17,10 @@ """ import argparse +from pathlib import Path import pprint import xml.etree.ElementTree as ET - STD_VALUES_FILE_TEMPLATE = ''' # Copyright Iris contributors # @@ -61,7 +59,8 @@ def process_name_table(tree, element_name, *child_elements): - """ + """Yield id->mapping dictionaries for each entry in the standard name table. + Yields a series of dictionaries with the key being the id of the entry element and the value containing another dictionary mapping other attributes of the standard name to their values, e.g. units, description, grib value etc. """ @@ -70,9 +69,11 @@ def process_name_table(tree, element_name, *child_elements): for child_elem in child_elements: found_elem = elem.find(child_elem) - sub_section[child_elem] = found_elem.text if found_elem is not None else None + sub_section[child_elem] = ( + found_elem.text if found_elem is not None else None + ) - yield {elem.get("id") : sub_section} + yield {elem.get("id"): sub_section} def to_dict(infile, outfile): @@ -81,36 +82,41 @@ def to_dict(infile, outfile): tree = ET.parse(infile) - cf_table_version_string = tree.find('version_number').text + cf_table_version_string = tree.find("version_number").text - for section in process_name_table(tree, 'entry', 'canonical_units'): + for section in process_name_table(tree, "entry", "canonical_units"): values.update(section) - for section in process_name_table(tree, 'alias', 'entry_id'): + for section in process_name_table(tree, "alias", "entry_id"): aliases.update(section) for key, valued in aliases.items(): - values.update({ - key : {'canonical_units' : values.get(valued['entry_id']).get('canonical_units')} - }) + values.update( + { + key: { + "canonical_units": values.get(valued["entry_id"]).get( + "canonical_units" + ) + } + } + ) text = STD_VALUES_FILE_TEMPLATE.format(table_version=cf_table_version_string) 
text += pprint.pformat(values) - text += f'\n\nCF_STANDARD_NAMES_TABLE_VERSION = {cf_table_version_string}\n' + text += f"\n\nCF_STANDARD_NAMES_TABLE_VERSION = {cf_table_version_string}\n" outfile.write(text) if __name__ == "__main__": parser = argparse.ArgumentParser( - description='Create Python code from CF standard name XML.') - parser.add_argument('input', metavar='INPUT', - help='Path to CF standard name XML') - parser.add_argument('output', metavar='OUTPUT', - help='Path to resulting Python code') + description="Create Python code from CF standard name XML." + ) + parser.add_argument("input", metavar="INPUT", help="Path to CF standard name XML") + parser.add_argument( + "output", metavar="OUTPUT", help="Path to resulting Python code" + ) args = parser.parse_args() - encoding = {'encoding': 'utf-8'} - - with open(args.input, 'r', **encoding) as in_fh: - with open(args.output, 'w', **encoding) as out_fh: + with open(args.input, "r", encoding="utf-8") as in_fh: + with open(args.output, "w", encoding="utf-8") as out_fh: to_dict(in_fh, out_fh) diff --git a/tools/release_do_nothing.py b/tools/release_do_nothing.py index 34700ebb87..5d1479206d 100755 --- a/tools/release_do_nothing.py +++ b/tools/release_do_nothing.py @@ -3,18 +3,22 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -A do-nothing script to hand-hold through the Iris release process. +"""A do-nothing script to hand-hold through the Iris release process. 
https://blog.danslimmon.com/2019/07/15/do-nothing-scripting-the-key-to-gradual-automation/ """ + from datetime import datetime from enum import IntEnum from pathlib import Path import re +import shlex +import subprocess import typing +from packaging.version import InvalidVersion, Version + try: from nothing import Progress except ImportError: @@ -25,17 +29,37 @@ raise ImportError(install_message) +class IrisVersion(Version): + def __str__(self): + return f"v{super().__str__()}" + + @property + def minor_series(self) -> str: + return f"v{self.major}.{self.minor}" + + @property + def branch(self) -> str: + return f"{self.minor_series}.x" + + @property + def short(self) -> str: + return str(self)[1:] + + class IrisRelease(Progress): class ReleaseTypes(IntEnum): + """Enumeration of semantic versioning types.""" + MAJOR = 0 MINOR = 1 PATCH = 2 - github_user: str = None - release_type: ReleaseTypes = None - git_tag: str = None # v1.2.3rc0 - first_in_series: bool = None - sha256: str = None + github_scitools: str = "upstream" + github_fork: str = "origin" + github_user: typing.Optional[str] = None + patch_min_max_tag: typing.Optional[tuple[str, str]] = None + git_tag: typing.Optional[str] = None # v1.2.3rc0 + sha256: typing.Optional[str] = None @classmethod def get_cmd_description(cls) -> str: @@ -44,75 +68,139 @@ def get_cmd_description(cls) -> str: @classmethod def get_steps(cls) -> list[typing.Callable[..., None]]: return [ - cls.get_github_user, - cls.get_release_type, + cls.analyse_remotes, cls.get_release_tag, - cls.check_release_candidate, - cls.check_first_in_series, + cls.get_all_patches, + cls.apply_patches, + cls.validate, + cls.release_highlights, cls.update_standard_names, cls.check_deprecations, cls.create_release_branch, cls.finalise_whats_new, cls.cut_release, - cls.check_rtd, cls.check_pypi, cls.update_conda_forge, + cls.check_rtd, cls.update_links, cls.bluesky_announce, + cls.revisit_conda_forge, cls.merge_back, cls.next_release, ] - def 
get_github_user(self): - def validate(input_user: str) -> str | None: - if not re.fullmatch(r"[a-zA-Z0-9-]+", input_user): - self.report_problem("Invalid GitHub username. Please try again ...") - else: - return input_user + @staticmethod + def _git_remote_v() -> str: + # Factored out to assist with testing. + return subprocess.check_output(shlex.split("git remote -v"), text=True) + + def _git_remote_get_url(self) -> str: + # Factored out to assist with testing. + return subprocess.check_output( + shlex.split(f"git remote get-url {self.github_fork}"), text=True + ) - message = ( - "Please input your GitHub username.\n" - "This is used in the URLs for creating pull requests." + def analyse_remotes(self): + self.print("Analysing Git remotes ...") + + class Remote(typing.NamedTuple): + name: str + url: str + fetch: bool + + remotes_raw = self._git_remote_v().splitlines() + remotes_split = [line.split() for line in remotes_raw] + remotes = [ + Remote(name=parts[0], url=parts[1], fetch=parts[2] == "(fetch)") + for parts in remotes_split + ] + + scitools_regex = re.compile(r"github\.com[:/]SciTools/iris") + self.github_scitools = [ + r.name for r in remotes if r.fetch and scitools_regex.search(r.url) + ][0] + + possible_forks = [ + r for r in remotes if not r.fetch and r.name != self.github_scitools + ] + assert len(possible_forks) > 0 + + def number_to_fork(input_number: str) -> str | None: + try: + result = possible_forks[int(input_number)].name + except (ValueError, IndexError): + result = None + self.report_problem("Invalid number. 
Please try again ...") + return result + + numbered_forks = " | ".join( + [f"{ix}: {r.name}" for ix, r in enumerate(possible_forks)] ) self.set_value_from_input( - key="github_user", - message=message, - expected_inputs="Username", - post_process=validate, + key="github_fork", + message="Which remote is your Iris fork?", + expected_inputs=f"Choose a number {numbered_forks}", + post_process=number_to_fork, ) - self.print(f"GitHub username = {self.github_user}") - def get_release_type(self): - def validate(input_value: str) -> IrisRelease.ReleaseTypes | None: - try: - return self.ReleaseTypes(int(input_value)) - except ValueError: - self.report_problem("Invalid release type. Please try again ...") + fork_url = self._git_remote_get_url() + search_result = re.search( + r"(?<=github\.com[:/])([a-zA-Z0-9-]+)(?=/)", + fork_url, + ) + if search_result is None: + message = f"Error deriving GitHub username from URL: {fork_url}" + raise RuntimeError(message) + else: + self.github_user = search_result.group(0) - self.set_value_from_input( - key="release_type", - message="What type of release are you preparing?\nhttps://semver.org/", - expected_inputs=f"Choose a number {tuple(self.ReleaseTypes)}", - post_process=validate, + def _git_ls_remote_tags(self) -> str: + # Factored out to assist with testing. + return subprocess.check_output( + shlex.split(f"git ls-remote --tags {self.github_scitools}"), + text=True, ) - self.print(f"{repr(self.release_type)} confirmed.") - def get_release_tag(self): - # TODO: automate using setuptools_scm. 
+ def _get_tagged_versions(self) -> list[IrisVersion]: + tag_regex = re.compile(r"(?<=refs/tags/).*$") + scitools_tags_raw = self._git_ls_remote_tags().splitlines() + scitools_tags_searched = [tag_regex.search(line) for line in scitools_tags_raw] + scitools_tags = [ + search.group(0) for search in scitools_tags_searched if search is not None + ] + + def get_version(tag: str) -> IrisVersion | None: + try: + return IrisVersion(tag) + except InvalidVersion: + return None + + versions = [get_version(tag) for tag in scitools_tags] + tagged_versions = [v for v in versions if v is not None] + if len(tagged_versions) == 0: + message = ( + "Error: unable to find any valid version tags in the " + f"{self.github_scitools} remote." + ) + raise RuntimeError(message) + return tagged_versions + def get_release_tag(self): def validate(input_tag: str) -> str | None: - # TODO: use the packaging library? - version_mask = r"v\d+\.\d+\.\d+\D*.*" - regex_101 = "https://regex101.com/r/dLVaNH/1" - if re.fullmatch(version_mask, input_tag) is None: - problem_message = ( - "Release tag does not match the input mask:\n" - f"{version_mask}\n" - f"({regex_101})" - ) - self.report_problem(problem_message) + result = None + try: + version = IrisVersion(input_tag) + except InvalidVersion as err: + self.report_problem(f"Packaging error: {err}\nPlease try again ...") else: - return input_tag # v1.2.3rc0 + if version in self._get_tagged_versions(): + self.report_problem( + f"Version {version} already exists as a git tag. " + "Please try again ..." + ) + else: + result = input_tag # v1.2.3rc0 + return result message = ( "Input the release tag you are creating today, including any " @@ -129,71 +217,232 @@ def validate(input_tag: str) -> str | None: post_process=validate, ) - class Strings(typing.NamedTuple): - series: str - branch: str - release: str + @property + def version(self) -> IrisVersion: + # Implemented like this since the Version class cannot be JSON serialised. 
+ return IrisVersion(self.git_tag) @property - def strings(self) -> Strings: - series = ".".join(self.git_tag.split(".")[:2]) # v1.2 - return self.Strings( - series=series, - branch=series + ".x", # v1.2.x - release=self.git_tag[1:], # 1.2.3rc0 - ) + def is_latest_tag(self) -> bool: + return all(self.version >= v for v in self._get_tagged_versions()) + + @property + def release_type(self) -> ReleaseTypes: + if self.version.micro == 0: + if self.version.minor == 0: + release_type = self.ReleaseTypes.MAJOR + else: + release_type = self.ReleaseTypes.MINOR + else: + release_type = self.ReleaseTypes.PATCH + return release_type @property def is_release_candidate(self) -> bool: - return "rc" in self.git_tag + return self.version.is_prerelease and self.version.pre[0] == "rc" - def check_release_candidate(self): - message = "Checking tag for release candidate: " - if self.is_release_candidate: - message += "DETECTED\nThis IS a release candidate." - else: - message += "NOT DETECTED\nThis IS NOT a release candidate." - self.print(message) + @property + def first_in_series(self) -> bool: + release_step = IrisRelease.get_steps().index(IrisRelease.cut_release) + release_complete = self.latest_complete_step >= release_step + same_series = [ + v + for v in self._get_tagged_versions() + if v.minor_series == self.version.minor_series + ] + result = len(same_series) == 0 or ( + release_complete and same_series == [self.version] + ) + return result - if self.release_type == self.ReleaseTypes.PATCH and self.is_release_candidate: + def get_all_patches(self): + if self.release_type is self.ReleaseTypes.PATCH: message = ( - "Release candidates are not expected for PATCH releases. " - "Are you sure you want to continue?" + "PATCH release detected. Sometimes a patch needs to be applied " + "to multiple minor_series." 
) - if self.get_input(message, "y / [n]").casefold() != "y".casefold(): - exit() + self.print(message) - def check_first_in_series(self): - if self.release_type != self.ReleaseTypes.PATCH: - message = ( - f"Is this the first release in the {self.strings.series} " - f"series, including any release candidates?" + tagged_versions = self._get_tagged_versions() + series_all = [v.minor_series for v in sorted(tagged_versions)] + series_unique = sorted(set(series_all), key=series_all.index) + series_numbered = "\n".join( + f"{i}: {s}" for i, s in enumerate(series_unique) ) + + def numbers_to_new_patches(input_numbers: str) -> tuple[str, str] | None: + try: + first_str, last_str = input_numbers.split(",") + first, last = int(first_str), int(last_str) + except ValueError: + self.report_problem( + "Invalid input, expected two integers comma-separated. " + "Please try again ..." + ) + return None + + try: + series_min = series_unique[first] + series_max = series_unique[last] + except IndexError: + self.report_problem("Invalid numbers. Please try again ...") + return None + + def series_new_patch(series: str) -> str: + latest = max(v for v in tagged_versions if v.minor_series == series) + iris_version = IrisVersion( + f"{latest.major}.{latest.minor}.{latest.micro + 1}" + ) + return str(iris_version) + + return (series_new_patch(series_min), series_new_patch(series_max)) + self.set_value_from_input( - key="first_in_series", + key="patch_min_max_tag", + message=( + f"{series_numbered}\n\n" + "Input the earliest and latest minor_series that need patching." + ), + expected_inputs=f"Choose two numbers from above e.g. 0,2", + post_process=numbers_to_new_patches, + ) + + first_patch = self.patch_min_max[0] + if self.version > first_patch: + message = ( + f"Starting with {first_patch}. 
({self.version} will be " + "covered in sequence)" + ) + self.print(message) + self.git_tag = str(first_patch) + + @property + def patch_min_max(self) -> tuple[IrisVersion, IrisVersion] | None: + if self.patch_min_max_tag is None: + result = None + else: + assert len(self.patch_min_max_tag) == 2 + result = ( + IrisVersion(self.patch_min_max_tag[0]), + IrisVersion(self.patch_min_max_tag[1]), + ) + return result + + @property + def more_patches_after_this_one(self) -> bool: + return ( + self.release_type is self.ReleaseTypes.PATCH + and self.patch_min_max is not None + and self.version.minor_series < self.patch_min_max[1].minor_series + ) + + def apply_patches(self): + if self.release_type is self.ReleaseTypes.PATCH: + message = ( + f"Input the {self.github_scitools} branch name where the patch " + "change commit(s) exist, or make no input if nothing has been " + "merged yet." + ) + patch_branch = self.get_input( message=message, - expected_inputs="y / n", - post_process=lambda x: x.casefold() == "y".casefold(), + expected_inputs="", ) - if self.first_in_series: - self.print("First in series confirmed.") - if not self.is_release_candidate: + match patch_branch: + case self.version.branch: message = ( - "The first release in a series is expected to be a " - "release candidate, but this is not. Are you sure you " - "want to continue?" + "The patch change(s) are on the ideal branch to avoid later " + f"Git conflicts: {self.version.branch} . Continue ..." ) - if self.get_input(message, "y / [n]").casefold() != "y".casefold(): - exit() - else: - self.print("Existing series confirmed.") + case "": + message = ( + f"Propose the patch change(s) against {self.version.branch} via " + f"pull request(s). Targeting {self.version.branch} will " + "avoid later Git conflicts." 
+ ) + case _: + message = ( + "Create pull request(s) cherry-picking the patch change(s) " + f"from {patch_branch} into {self.version.branch} .\n" + "cherry-picking will cause Git conflicts later in the " + "release process; in future consider targeting the patch " + "change(s) directly at the release branch." + ) + + self.wait_for_done(message) + + def validate(self) -> None: + self.print("Validating release details ...") + + message_template = ( + f"{self.version} corresponds to a {{}} release. This script cannot " + "handle such releases." + ) + if self.version.is_devrelease: + message = message_template.format("development") + raise RuntimeError(message) + if self.version.is_postrelease: + message = message_template.format("post") + raise RuntimeError(message) + + if self.version.is_prerelease and self.version.pre[0] != "rc": + message = ( + "The only pre-release type that this script can handle is 'rc' " + f"(for release candidate), but got '{self.version.pre[0]}'." + ) + raise RuntimeError(message) + + if self.release_type is self.ReleaseTypes.PATCH and self.is_release_candidate: + message = ( + f"{self.version} corresponds to a PATCH release AND a release " + "candidate. This script cannot handle that combination." + ) + raise RuntimeError(message) + + if self.first_in_series: + message_pre = f"No previous releases found in the {self.version.minor_series} minor_series." + if self.release_type is self.ReleaseTypes.PATCH: + message = ( + f"{message_pre} This script cannot handle a PATCH release " + f"that is the first in a minor_series." + ) + raise RuntimeError(message) + + if not self.is_release_candidate: + message = ( + f"{message_pre} The first release in a minor_series is expected " + f"to be a release candidate, but this is not. Are you sure " + f"you want to continue?" 
+ ) + if self.get_input(message, "y / [n]").casefold() != "y".casefold(): + exit() + + status = { + "GitHub user": self.github_user, + "SciTools remote": self.github_scitools, + "Fork remote": self.github_fork, + "Release tag": self.git_tag, + "Release type": self.release_type.name, + "Release candidate?": self.is_release_candidate, + f"First release in {self.version.minor_series} minor_series?": self.first_in_series, + "Current latest Iris release": max(self._get_tagged_versions()), + } + if ( + self.release_type is self.ReleaseTypes.PATCH + and self.patch_min_max is not None + ): + status["Minor series being patched"] = ( + f"{self.patch_min_max[0].minor_series} to {self.patch_min_max[1].minor_series}" + ) + message = ( + "\n".join(f"- {k}: {v}" for k, v in status.items()) + "\n\n" + "Confirm that the details above are correct.\n" + "Consider temporary/permanent edits to the do-nothing script if " + "necessary." + ) + self.wait_for_done(message) def _create_pr( - self, - base_org: str, - base_repo: str, - base_branch: str, - head_branch: str + self, base_org: str, base_repo: str, base_branch: str, head_branch: str ) -> None: """Instruct user to create a PR with a specified base and head. @@ -223,14 +472,24 @@ def _create_pr( ) self.wait_for_done(pr_message) + def release_highlights(self): + if self.first_in_series: + message = ( + "Assemble some bullet points summarising the highlights of " + "this release. Share with the development team for feedback.\n" + "The finalised highlights will be included in the What's New " + "page later in this process." 
+ ) + self.wait_for_done(message) + def update_standard_names(self): if self.first_in_series: - working_branch = self.strings.branch + ".standard_names" + working_branch = self.version.branch + ".standard_names" self._delete_local_branch(working_branch) message = ( "Checkout a local branch from the official ``main`` branch.\n" - "git fetch upstream;\n" - f"git checkout upstream/main -b {working_branch};" + f"git fetch {self.github_scitools};\n" + f"git checkout {self.github_scitools}/main -b {working_branch};" ) self.wait_for_done(message) @@ -241,7 +500,7 @@ def update_standard_names(self): f'wget "{url}" -O {file};\n' f"git add {file};\n" "git commit -m 'Update CF standard names table.';\n" - f"git push -u origin {working_branch};" + f"git push -u {self.github_fork} {working_branch};" ) self.wait_for_done(message) @@ -255,7 +514,7 @@ def update_standard_names(self): self.wait_for_done(message) def check_deprecations(self): - if self.release_type == self.ReleaseTypes.MAJOR: + if self.release_type is self.ReleaseTypes.MAJOR: message = ( "This is a MAJOR release - be sure to finalise all deprecations " "and FUTUREs from previous releases, via a new Pull Request.\n" @@ -270,36 +529,38 @@ def create_release_branch(self): if self.first_in_series: message = ( - "Visit https://github.com/SciTools/iris and create the" - f"``{self.strings.branch}`` release branch from ``main``." + "Visit https://github.com/SciTools/iris and create the " + f"``{self.version.branch}`` release branch from ``main``." ) self.wait_for_done(message) else: message = ( - "Cherry-pick any specific commits that are needed from ``main`` " - f"onto {self.strings.branch} , to get the CI passing.\n" + "If necessary: " + "cherry-pick any specific commits that are needed from ``main`` " + f"onto {self.version.branch} , to get the CI passing.\n" "E.g. 
a new dependency pin may have been introduced since " - f"{self.strings.branch} was last updated from ``main``.\n" - "DO NOT squash-merge - want to preserve the original commit " - "SHA's." + f"{self.version.branch} was last updated from ``main``.\n" + "Note that cherry-picking will cause Git conflicts later in " + "the release process." ) self.wait_for_done(message) - @staticmethod - def _delete_local_branch(branch_name: str): + def _delete_local_branch(self, branch_name: str): message = ( "Before the next step, avoid a name clash by deleting any " "existing local branch, if one exists.\n" f"git branch -D {branch_name};\n" - f"git push -d origin {branch_name};" + f"git push -d {self.github_fork} {branch_name};" ) IrisRelease.wait_for_done(message) class WhatsNewRsts(typing.NamedTuple): + """The various paths that make up the What's New structure.""" + latest: Path release: Path - index: Path + index_: Path template: Path @property @@ -311,21 +572,21 @@ def whats_news(self) -> WhatsNewRsts: return self.WhatsNewRsts( latest=latest, - release=whatsnew_dir / (self.strings.series[1:] + ".rst"), - index=whatsnew_dir / "index.rst", + release=whatsnew_dir / (self.version.minor_series[1:] + ".rst"), + index_=whatsnew_dir / "index.rst", template=latest.with_suffix(".rst.template"), ) def finalise_whats_new(self): self.print("What's New finalisation ...") - working_branch = self.strings.branch + ".updates" + working_branch = self.version.branch + ".updates" self._delete_local_branch(working_branch) message = ( - f"Checkout a local branch from the official {self.strings.branch} " + f"Checkout a local branch from the official {self.version.branch} " f"branch.\n" - "git fetch upstream;\n" - f"git checkout upstream/{self.strings.branch} -b " + f"git fetch {self.github_scitools};\n" + f"git checkout {self.github_scitools}/{self.version.branch} -b " f"{working_branch};" ) self.wait_for_done(message) @@ -340,7 +601,7 @@ def finalise_whats_new(self): self.wait_for_done(message) 
message = ( - f"In {self.whats_news.index.absolute()}:\n" + f"In {self.whats_news.index_.absolute()}:\n" f"Replace references to {self.whats_news.latest.name} with " f"{self.whats_news.release.name}" ) @@ -348,9 +609,9 @@ def finalise_whats_new(self): self.print(f"What's New file path = {self.whats_news.release}") - if not self.release_type == self.ReleaseTypes.PATCH: + if not self.release_type is self.ReleaseTypes.PATCH: whatsnew_title = ( - f"{self.strings.series} ({datetime.today().strftime('%d %b %Y')}" + f"{self.version.minor_series} ({datetime.today().strftime('%d %b %Y')}" ) if self.is_release_candidate: whatsnew_title += " [release candidate]" @@ -373,7 +634,7 @@ def finalise_whats_new(self): ) self.wait_for_done(message) - dropdown_title = f"\n{self.strings.series} Release Highlights\n" + dropdown_title = f"\n{self.version.minor_series} Release Highlights\n" message = ( f"In {self.whats_news.release.name}: set the sphinx-design " f"dropdown title to:{dropdown_title}" @@ -382,7 +643,7 @@ def finalise_whats_new(self): message = ( f"Review {self.whats_news.release.name} to ensure it is a good " - f"reflection of what is new in {self.strings.series}.\n" + f"reflection of what is new in {self.version.minor_series}.\n" "I.e. all significant work you are aware of should be " "present, such as a major dependency pin, a big new feature, " "a known performance change. 
You can not be expected to know " @@ -416,16 +677,16 @@ def finalise_whats_new(self): message = ( "Commit and push all the What's New changes.\n" f"git add {self.whats_news.release.absolute()};\n" - f"git add {self.whats_news.index.absolute()};\n" - f'git commit -m "Whats new updates for {self.git_tag} .";\n' - f"git push -u origin {working_branch};" + f"git add {self.whats_news.index_.absolute()};\n" + f'git commit -m "Whats-New updates for {self.version} .";\n' + f"git push -u {self.github_fork} {working_branch};" ) self.wait_for_done(message) self._create_pr( base_org="SciTools", base_repo="iris", - base_branch=self.strings.branch, + base_branch=self.version.branch, head_branch=working_branch, ) message = ( @@ -445,8 +706,8 @@ def cut_release(self): self.wait_for_done(message) message = ( - f"Select {self.strings.branch} as the Target.\n" - f"Input {self.git_tag} as the new tag to create, and also as " + f"Select {self.version.branch} as the Target.\n" + f"Input {self.version} as the new tag to create, and also as " "the Release title.\n" "Make sure you are NOT targeting the `main` branch." ) @@ -468,8 +729,8 @@ def cut_release(self): message = ( "This is a release candidate - include the following " "instructions for installing with conda or pip:\n" - f"conda install -c conda-forge/label/rc_iris iris={self.strings.release}\n" - f"pip install scitools-iris=={self.strings.release}" + f"conda install -c conda-forge/label/rc_iris iris={self.version.short}\n" + f"pip install scitools-iris=={self.version.short}" ) self.wait_for_done(message) @@ -480,7 +741,10 @@ def cut_release(self): self.wait_for_done(message) else: - message = "Tick the box to set this as the latest release." + if self.is_latest_tag: + message = "Tick the box to set this as the latest release." + else: + message = "Un-tick the latest release box." self.wait_for_done(message) message = "Click: Publish release !" 
@@ -503,48 +767,52 @@ def check_rtd(self): self.print("Read the Docs checks ...") message = ( - "Visit https://readthedocs.org/projects/scitools-iris/versions/ " + "Visit https://app.readthedocs.org/projects/scitools-iris/ " "and make sure you are logged in." ) self.wait_for_done(message) - message = f"Set {self.git_tag} to Active, un-Hidden." + add_version = ( + "You may need to click `Add version` if it is not already in the list" + ) + + message = f"Set {self.version} to Active, un-Hidden.\n{add_version}" self.wait_for_done(message) - message = f"Set {self.strings.branch} to Active, Hidden." + message = f"Set {self.version.branch} to Active, Hidden.\n{add_version}" self.wait_for_done(message) message = ( "Keep only the latest 2 branch doc builds active - " - f"'{self.strings.branch}' and the previous one - deactivate older " + f"'{self.version.branch}' and the previous one - deactivate older " "ones." ) self.wait_for_done(message) message = ( - f"Visit https://scitools-iris.readthedocs.io/en/{self.git_tag} " + f"Visit https://scitools-iris.readthedocs.io/en/{self.version} " "to confirm:\n\n" "- The docs have rendered.\n" "- The version badge in the top left reads:\n" - f" 'version (archived) | {self.git_tag}'\n" + f" 'version (archived) | {self.version}'\n" " (this demonstrates that setuptools_scm has worked correctly).\n" "- The What's New looks correct.\n" - f"- {self.git_tag} is available in RTD's version switcher.\n\n" - "NOTE: the docs can take several minutes to finish building." + f"- {self.version} is available in RTD's version switcher.\n" ) - if not self.is_release_candidate: + if not self.is_release_candidate and self.is_latest_tag: message += ( "- Selecting 'stable' in the version switcher also brings up " - f"the {self.git_tag} render." + f"the {self.version} render.\n" ) + message += "\nNOTE: the docs can take several minutes to finish building." 
self.wait_for_done(message) message = ( - f"Visit https://scitools-iris.readthedocs.io/en/{self.strings.branch} " + f"Visit https://scitools-iris.readthedocs.io/en/{self.version.branch} " "to confirm:\n\n" "- The docs have rendered\n" - f"- The version badge in the top left includes: {self.strings.branch} .\n" - f"- {self.strings.branch} is NOT available in RTD's version switcher.\n\n" + f"- The version badge in the top left includes: {self.version.branch} .\n" + f"- {self.version.branch} is NOT available in RTD's version switcher.\n\n" "NOTE: the docs can take several minutes to finish building." ) self.wait_for_done(message) @@ -555,31 +823,33 @@ def check_pypi(self): message = ( "Confirm that the following URL is correctly populated:\n" - f"https://pypi.org/project/scitools-iris/{self.strings.release}/" + f"https://pypi.org/project/scitools-iris/{self.version.short}/" ) self.wait_for_done(message) - message = ( - f"Confirm that {self.strings.release} is at the top of this page:\n" - "https://pypi.org/project/scitools-iris/#history" - ) - self.wait_for_done(message) + if self.is_latest_tag: + message = ( + f"Confirm that {self.version.short} is at the top of this page:\n" + "https://pypi.org/project/scitools-iris/#history" + ) + self.wait_for_done(message) if self.is_release_candidate: message = ( - f"Confirm that {self.strings.release} is marked as a " + f"Confirm that {self.version.short} is marked as a " f"pre-release on this page:\n" "https://pypi.org/project/scitools-iris/#history" ) - else: + self.wait_for_done(message) + elif self.is_latest_tag: message = ( - f"Confirm that {self.strings.release} is the tag shown on the " + f"Confirm that {self.version.short} is the tag shown on the " "scitools-iris PyPI homepage:\n" "https://pypi.org/project/scitools-iris/" ) - self.wait_for_done(message) + self.wait_for_done(message) - def validate(sha256_string: str) -> str: + def validate(sha256_string: str) -> str | None: valid = True try: _ = int(sha256_string, 16) 
@@ -589,13 +859,15 @@ def validate(sha256_string: str) -> str:
 
         if not valid:
             self.report_problem("Invalid SHA256 hash. Please try again ...")
+            result = None
         else:
-            return sha256_string
+            result = sha256_string
+        return result
 
         message = (
-            f"Visit the below and click `view hashes` for the Source Distribution"
+            f"Visit the below to view the details for the Source Distribution "
             f"(`.tar.gz`):\n"
-            f"https://pypi.org/project/scitools-iris/{self.strings.release}#files\n"
+            f"https://pypi.org/project/scitools-iris/{self.version.short}/#scitools_iris-{self.version.short}.tar.gz\n"
        )
        self.set_value_from_input(
            key="sha256",
@@ -606,9 +878,10 @@ def validate(sha256_string: str) -> str:
 
        message = (
            "Confirm that pip install works as expected:\n"
+            "Beware of any Python pin Iris might have when creating your Conda environment!\n"
            "conda create -y -n tmp_iris pip cf-units;\n"
            "conda activate tmp_iris;\n"
-            f"pip install scitools-iris=={self.strings.release};\n"
+            f"pip install scitools-iris=={self.version.short};\n"
            'python -c "import iris; print(iris.__version__)";\n'
            "conda deactivate;\n"
            "conda remove -n tmp_iris --all;\n"
@@ -645,13 +918,10 @@ def update_conda_forge(self):
            "`rc` / `release-candidate` / similar .\n"
        )
        rc_branch = self.get_input(
-            message,
-            "Input the name of the release candidate branch"
+            message, "Input the name of the release candidate branch"
        )
 
-        message = (
-            f"Is the latest commit on {rc_branch} over 1 month ago?"
-        )
+        message = f"Is the latest commit on {rc_branch} over 1 month ago?"
archive_rc = None while archive_rc is None: valid_entries = ["y", "n"] @@ -707,15 +977,16 @@ def update_conda_forge(self): "release:\n" "git fetch upstream;\n" f"git checkout upstream/{upstream_branch} -b " - f"{self.git_tag};\n" + f"{self.version};\n" ) self.wait_for_done(message) message = ( "Update ./recipe/meta.yaml:\n\n" f"- The version at the very top of the file: " - f"{self.strings.release}\n" + f"{self.version.short}\n" f"- The sha256 hash: {self.sha256}\n" + "- Build number: reset to 0 (or advance it if this is not a new release).\n" "- Requirements: align the packages and pins with those in the " "Iris repo\n" "- Maintainers: update with any changes to the dev team\n" @@ -723,6 +994,12 @@ def update_conda_forge(self): "date, e.g. is the licence info still correct? Ask the lead " "Iris developers if unsure.\n" ) + if not self.is_latest_tag: + message += ( + f"\nNOTE: {self.version} is not the latest Iris release, so " + "you may need to restore settings from an earlier version " + f"(check previous {self.version.minor_series} releases)." 
+ ) self.wait_for_done(message) # TODO: automate @@ -730,9 +1007,11 @@ def update_conda_forge(self): "No other file normally needs changing in iris-feedstock, " "so push up " "the changes to prepare for a Pull Request:\n" + "WARNING: accidentally pushing straight to conda-forge (instead " + "of your fork) will instantly trigger a release!\n" f"git add recipe/meta.yaml;\n" - f'git commit -m "Recipe updates for {self.git_tag} .";\n' - f"git push -u origin {self.git_tag};" + f'git commit -m "Recipe updates for {self.version} .";\n' + f"git push -u origin {self.version};" ) self.wait_for_done(message) @@ -740,13 +1019,14 @@ def update_conda_forge(self): base_org="conda-forge", base_repo="iris-feedstock", base_branch=upstream_branch, - head_branch=self.git_tag, + head_branch=f"{self.version}", ) if self.is_release_candidate: - readme_url = f"https://github.com/{self.github_user}/iris-feedstock/blob/{self.git_tag}/README.md" + readme_url = f"https://github.com/{self.github_user}/iris-feedstock/blob/{self.version}/README.md" rc_evidence = ( - "\n\nConfirm that conda-forge knows your changes are for the " + "\n\nAfter conda-forge has committed the re-render: " + "confirm that conda-forge knows your changes are for the " "release candidate channel by checking the below README file. 
" "This should make multiple references to the `rc_iris` label:\n" f"{readme_url}" @@ -770,39 +1050,64 @@ def update_conda_forge(self): self.wait_for_done(message) message = ( - f"Confirm that {self.strings.release} appears in this list:\n" + f"Confirm that {self.version.short} appears in this list:\n" "https://anaconda.org/conda-forge/iris/files" ) self.wait_for_done(message) - if not self.is_release_candidate: + if not self.is_release_candidate and self.is_latest_tag: message = ( - f"Confirm that {self.strings.release} is displayed on this " + f"Confirm that {self.version.short} is displayed on this " "page as the latest available:\n" "https://anaconda.org/conda-forge/iris" ) self.wait_for_done(message) + message = ( + "The new release will now undergo testing and validation in the " + "cf-staging channel. Once this is complete, the release will be " + "available in the standard conda-forge channel. This can " + "sometimes take minutes, or up to an hour.\n" + "We'll come back later." + ) + self.print(message) + + if not self.is_latest_tag and not self.more_patches_after_this_one: + latest_version = max(self._get_tagged_versions()) + message = ( + f"{self.version} is not the latest Iris release, so the " + f"{upstream_branch} branch needs to be restored to reflect " + f"{latest_version}, to minimise future confusion.\n" + "Do this via a new pull request. So long as the version number " + "and build number match the settings from the latest release, " + "no new conda-forge release will be triggered.\n" + ) + self.wait_for_done(message) + + # As many steps as possible will be put between update_conda_forge and + # revisit_conda_forge, given the delay. + + def revisit_conda_forge(self): + self.print("Revisiting conda-forge ...") + if self.is_release_candidate: channel_command = " -c conda-forge/label/rc_iris " else: channel_command = " -c conda-forge " message = ( - "The new release will now undergo testing and validation in the " - "cf-staging channel. 
Once this is complete, the release will be " - "available in the standard conda-forge channel. This can " - "sometimes take minutes, or up to an hour.\n" "Confirm that the new release is available for use from " "conda-forge by running the following command:\n" - f"conda search{channel_command}iris=={self.strings.release};" + f"conda search{channel_command}iris=={self.version.short};" ) self.wait_for_done(message) message = ( "Confirm that conda (or mamba) install works as expected:\n" + "If anything is wrong: consider whether announcement(s) might need " + "undoing/updating.\n" f"conda create -n tmp_iris{channel_command}iris=" - f"{self.strings.release};\n" + f"{self.version.short};\n" "conda activate tmp_iris;\n" 'python -c "import iris; print(iris.__version__)";\n' "conda deactivate;\n" @@ -815,13 +1120,13 @@ def update_links(self): message = ( "Revisit the GitHub release:\n" - f"https://github.com/SciTools/iris/releases/tag/{self.git_tag}\n" + f"https://github.com/SciTools/iris/releases/tag/{self.version}\n" "You have confirmed that Read the Docs, PyPI and conda-forge have all " "updated correctly. Include the following links in the release " "notes:\n\n" - f"https://scitools-iris.readthedocs.io/en/{self.git_tag}/\n" - f"https://pypi.org/project/scitools-iris/{self.strings.release}/\n" - f"https://anaconda.org/conda-forge/iris?version={self.strings.release}\n" + f"https://scitools-iris.readthedocs.io/en/{self.version}/\n" + f"https://pypi.org/project/scitools-iris/{self.version.short}/\n" + f"https://anaconda.org/channels/conda-forge/packages/iris/files?file_q={self.version.short}\n" ) self.wait_for_done(message) @@ -842,7 +1147,7 @@ def update_links(self): message = ( f"Comment on {discussion_url} to notify anyone watching that " - f"{self.git_tag} has been released." + f"{self.version} has been released." 
) self.wait_for_done(message) @@ -851,13 +1156,14 @@ def bluesky_announce(self): "Announce the release via https://bsky.app/profile/scitools.bsky.social, " "and any " "other appropriate message boards (e.g. Viva Engage).\n" + "Visuals like plots or screenshots are GREAT!\n" "Any content used for the announcement should be stored in the " "SciTools/bluesky-scitools GitHub repo.\n" ) if not self.first_in_series: message += ( f"Consider replying within an existing " - f"{self.strings.series} " + f"{self.version.minor_series} " "announcement thread, if appropriate." ) self.wait_for_done(message) @@ -870,29 +1176,56 @@ def merge_back(self): "preserve the commit SHA's." ) - if self.first_in_series: - # TODO: automate - - working_branch = self.strings.branch + ".mergeback" - self._delete_local_branch(working_branch) - message = ( - "Checkout a local branch from the official ``main`` branch.\n" - "git fetch upstream;\n" - f"git checkout upstream/main -b {working_branch};" + def next_series_patch() -> IrisVersion: + tagged_versions = self._get_tagged_versions() + series_all = sorted(set(v.minor_series for v in tagged_versions)) + try: + next_series = series_all[ + series_all.index(self.version.minor_series) + 1 + ] + except (IndexError, ValueError): + message = f"Error finding next minor_series after {self.version.minor_series} ." + raise RuntimeError(message) + + series_latest = max( + v for v in tagged_versions if v.minor_series == next_series ) - self.wait_for_done(message) - - message = ( - f"Merge in the commits from {self.strings.branch}.\n" - f"{merge_commit}\n" - f"git merge upstream/{self.strings.branch} --no-ff " - '-m "Merging release branch into main";' + return IrisVersion( + f"{series_latest.major}.{series_latest.minor}.{series_latest.micro + 1}" ) - self.wait_for_done(message) + if self.more_patches_after_this_one: + message = "More minor_series need patching. Merge into the next minor_series' branch ..." 
+ self.print(message) + next_patch = next_series_patch() + target_branch = next_patch.branch + working_branch = f"{self.version}-to-{target_branch}" + else: + next_patch = None + target_branch = "main" + working_branch = self.version.branch + ".mergeback" + + # TODO: automate + self._delete_local_branch(working_branch) + message = ( + "Checkout a local branch from the official branch.\n" + f"git fetch {self.github_scitools};\n" + f"git checkout {self.github_scitools}/{target_branch} -b {working_branch};" + ) + self.wait_for_done(message) + + message = ( + f"Merge in the commits from {self.version.branch}.\n" + f"{merge_commit}\n" + f"git merge {self.github_scitools}/{self.version.branch} --no-ff " + f'-m "Merging {self.version.branch} into {target_branch}";' + ) + self.wait_for_done(message) + + if self.first_in_series: message = ( "Recreate the What's New template from ``main``:\n" - f"git checkout upstream/main {self.whats_news.template.absolute()};\n" + f"git checkout {self.github_scitools}/main {self.whats_news.template.absolute()};\n" ) self.wait_for_done(message) @@ -900,7 +1233,6 @@ def merge_back(self): "Recreate the What's New latest from the template:\n" f"cp {self.whats_news.template.absolute()} " f"{self.whats_news.latest.absolute()};\n" - f"git add {self.whats_news.latest.absolute()};\n" ) self.wait_for_done(message) @@ -912,7 +1244,7 @@ def merge_back(self): self.wait_for_done(message) message = ( - f"In {self.whats_news.index.absolute()}:\n" + f"In {self.whats_news.index_.absolute()}:\n" f"Add {self.whats_news.latest.name} to the top of the list of .rst " f"files, " f"and set the top include:: to be {self.whats_news.latest.name} ." 
@@ -921,46 +1253,77 @@ def merge_back(self): message = ( "Commit and push all the What's New changes.\n" - f"git add {self.whats_news.index.absolute()};\n" - 'git commit -m "Restore latest Whats New files.";\n' - f"git push -u origin {working_branch};" + f"git add {self.whats_news.latest.absolute()};\n" + f"git add {self.whats_news.index_.absolute()};\n" + 'git commit -m "Restore latest Whats-New files.";\n' + f"git push -u {self.github_fork} {working_branch};" ) self.wait_for_done(message) - self._create_pr( - base_org="SciTools", - base_repo="iris", - base_branch="main", - head_branch=working_branch, - ) - message = ( - "Work with the development team to get the PR merged.\n" - "Make sure the documentation is previewed during this process.\n" - f"{merge_commit}" + self._create_pr( + base_org="SciTools", + base_repo="iris", + base_branch=target_branch, + head_branch=working_branch, + ) + + message = ( + "COMBINING BRANCHES CAN BE RISKY; confirm that only the expected " + "commits are in the PR." + ) + self.wait_for_done(message) + + message = ( + "Work with the development team to get the PR merged.\n" + f"If {self.version.branch} includes any cherry-picks, there may be " + "merge conflicts to resolve.\n" + "Make sure the documentation is previewed during this process.\n" + f"{merge_commit}" + ) + self.wait_for_done(message) + + if self.more_patches_after_this_one: + self.print("Moving on to the next patch ...") + assert self.version != next_patch + + # Create a special new progress file which is set up for stepping + # through the next patch release. 
+ next_patch_str = str(next_patch).replace(".", "_") + next_patch_stem = self._get_file_stem().with_stem(next_patch_str) + + class NextPatch(IrisRelease): + @classmethod + def _get_file_stem(cls) -> Path: + return next_patch_stem + + def run(self): + pass + + next_patch_kwargs = self.__getstate__() | dict( + git_tag=str(next_patch), + sha256=None, + latest_complete_step=NextPatch.get_steps().index(NextPatch.validate) + - 1, ) - self.wait_for_done(message) + next_patch_script = NextPatch(**next_patch_kwargs) + next_patch_script.save() - else: - message = ( - f"Propose a merge-back from {self.strings.branch} into " - f"``main`` by " - f"visiting this URL and clicking `Create pull request`:\n" - f"https://github.com/SciTools/iris/compare/main..." - f"{self.strings.branch}\n" - f"{merge_commit}" + new_command = ( + f"python {Path(__file__).absolute()} load " + f"{next_patch_script._file_path}" ) - self.wait_for_done(message) message = ( - f"Once the pull request is merged ensure that the " - f"{self.strings.branch} " - "release branch is restored.\n" - "GitHub automation rules may have automatically deleted the " - "release branch." + "Run the following command in a new terminal to address " + f"{next_patch} next:\n" + f"{new_command}" ) self.wait_for_done(message) def next_release(self): - if self.release_type != self.ReleaseTypes.PATCH and not self.is_release_candidate: + if ( + self.release_type is not self.ReleaseTypes.PATCH + and not self.is_release_candidate + ): self.print("Prep next release ...") message = ( diff --git a/tools/test_release_do_nothing.py b/tools/test_release_do_nothing.py new file mode 100644 index 0000000000..ed7240c6ef --- /dev/null +++ b/tools/test_release_do_nothing.py @@ -0,0 +1,1383 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
+"""Tests for the ``release_do_nothing.py`` file.""" + +import enum +from pathlib import Path +import re +from typing import Any, NamedTuple + +import nothing +import pytest +from pytest_mock import MockType + +from release_do_nothing import IrisRelease, IrisVersion + + +@pytest.fixture(autouse=True) +def mock_fast_print(mocker) -> None: + """Prevent the mod:`nothing` print methods from sleeping.""" + mocker.patch.object(nothing, "sleep", return_value=None) + + +@pytest.fixture(autouse=True) +def mock_git_remote_v(mocker) -> MockType: + """Mock :meth:`IrisRelease._git_remote_v`. + + Assumes return_value will be overridden by any calling test (the default + empty string will always error downstream). + """ + return mocker.patch.object( + IrisRelease, + "_git_remote_v", + return_value="", + ) + + +@pytest.fixture(autouse=True) +def mock_git_remote_get_url(mocker) -> MockType: + """Mock :meth:`IrisRelease._git_remote_get_url`. + + Assumes return_value will be overridden by any calling test (the default + empty string will always error downstream). + """ + return mocker.patch.object( + IrisRelease, + "_git_remote_get_url", + return_value="", + ) + + +@pytest.fixture(autouse=True) +def mock_git_ls_remote_tags(mocker) -> MockType: + """Mock :meth:`IrisRelease._git_ls_remote_tags`. + + Assumes return_value will be overridden by any calling test (the default + empty string will always error downstream). 
+ """ + return mocker.patch.object( + IrisRelease, + "_git_ls_remote_tags", + return_value="", + ) + + +@pytest.fixture +def mock_wait_for_done(mocker) -> MockType: + """Mock :meth:`IrisRelease.wait_for_done` to not wait, and to count calls.""" + return mocker.patch.object(IrisRelease, "wait_for_done", return_value=None) + + +@pytest.fixture +def mock_report_problem(mocker) -> MockType: + return mocker.patch.object(IrisRelease, "report_problem") + + +def mock_inputs(mocker, *inputs: str) -> None: + """Mock :func:`input` to return chosen values, specified in a sequence.""" + mocker.patch("builtins.input", side_effect=inputs) + + +def assert_input_msg_regex(call: Any, expected: re.Pattern[str] | str) -> None: + # TODO: use this for testing ALL messages that include dynamic content? + if isinstance(expected, str): + expected = re.compile(expected, re.DOTALL) + assert hasattr(call, "args") + assert len(call.args) > 0 + message = call.args[0] + assert isinstance(message, str) + assert expected.search(message) is not None, ( + f"Expected message matching {expected!r} in {message!r}" + ) + + +class TestIrisVersion: + """Tests for the :class:`IrisVersion` class.""" + + @pytest.fixture(params=["9.0.0", "9.0.1", "9.1.0"], autouse=True) + def _setup(self, request): + self.version = IrisVersion(request.param) + self.input_str = request.param + + def test_str(self): + expecteds = {"9.0.0": "v9.0.0", "9.0.1": "v9.0.1", "9.1.0": "v9.1.0"} + assert str(self.version) == expecteds[self.input_str] + + def test_minor_series(self): + expecteds = {"9.0.0": "v9.0", "9.0.1": "v9.0", "9.1.0": "v9.1"} + assert self.version.minor_series == expecteds[self.input_str] + + def test_branch(self): + expecteds = {"9.0.0": "v9.0.x", "9.0.1": "v9.0.x", "9.1.0": "v9.1.x"} + assert self.version.branch == expecteds[self.input_str] + + +class TestProperties: + """Tests for the properties of the :class:`IrisRelease` class.""" + + @pytest.fixture(autouse=True) + def _setup(self) -> None: + self.instance = 
IrisRelease( + _dry_run=True, + latest_complete_step=len(IrisRelease.get_steps()) - 1, + github_scitools="foo", + github_fork="bar", + github_user="user", + patch_min_max_tag=("8.0.0", "9.0.0"), + git_tag="9.1.1", + sha256="abcd1234", + ) + + def test_version(self): + assert self.instance.version == IrisVersion("9.1.1") + + @pytest.mark.parametrize("git_tag", ["1.1.0", "1.1.2"]) + def test_is_latest_tag(self, git_tag, mock_git_ls_remote_tags): + mock_git_ls_remote_tags.return_value = "abcd1234 refs/tags/1.1.1\n" + expecteds = {"1.1.0": False, "1.1.2": True} + expected = expecteds[git_tag] + self.instance.git_tag = git_tag + assert self.instance.is_latest_tag is expected + + @pytest.mark.parametrize("git_tag", ["9.0.0", "9.1.0", "9.1.1"]) + def test_release_type(self, git_tag): + expecteds = { + "9.0.0": IrisRelease.ReleaseTypes.MAJOR, + "9.1.0": IrisRelease.ReleaseTypes.MINOR, + "9.1.1": IrisRelease.ReleaseTypes.PATCH, + } + expected = expecteds[git_tag] + self.instance.git_tag = git_tag + assert self.instance.release_type is expected + + @pytest.mark.parametrize("git_tag", ["9.1.0rc1", "9.1.0"]) + def test_is_release_candidate(self, git_tag): + expecteds = {"9.1.0rc1": True, "9.1.0": False} + expected = expecteds[git_tag] + self.instance.git_tag = git_tag + assert self.instance.is_release_candidate is expected + + @pytest.mark.parametrize("git_tag", ["1.0.0", "1.0.1", "1.1.0"]) + @pytest.mark.parametrize( + "release_complete", + [True, False], + ids=["release complete", "release not complete"], + ) + def test_first_in_series(self, git_tag, release_complete, mock_git_ls_remote_tags): + mock_git_ls_remote_tags.return_value = ( + "abcd1234 refs/tags/1.0.0\n" + "abcd1235 refs/tags/1.0.1\n" + "abcd1236 refs/tags/1.1.0\n" + ) + release_step = IrisRelease.get_steps().index(IrisRelease.cut_release) + + if release_complete: + self.instance.latest_complete_step = release_step + expecteds = {"1.0.0": False, "1.0.1": False, "1.1.0": True} + else: + 
self.instance.latest_complete_step = release_step - 1 + expecteds = {"1.0.0": False, "1.0.1": False, "1.1.0": False} + + expected = expecteds[git_tag] + self.instance.git_tag = git_tag + assert self.instance.first_in_series is expected + + def test_patch_min_max(self): + assert self.instance.patch_min_max == ( + IrisVersion("8.0.0"), + IrisVersion("9.0.0"), + ) + self.instance.patch_min_max_tag = ("9.0.0",) + with pytest.raises(AssertionError, match="^$"): + _ = self.instance.patch_min_max + + @pytest.mark.parametrize("git_tag", ["8.1.0", "8.1.1", "9.0.1", "9.1.1"]) + def test_more_patches_after_this_one(self, git_tag): + expecteds = { + "8.1.0": False, # Not a PATCH release. + "8.1.1": True, # 9.0.0 still to patch. + "9.0.1": False, # Last PATCH in minor_series. + "9.1.1": False, # Beyond max minor_series. + } + expected = expecteds[git_tag] + self.instance.git_tag = git_tag + assert self.instance.more_patches_after_this_one is expected + + def test_whats_news(self): + whatsnew_dir = Path(__file__).parents[1] / "docs" / "src" / "whatsnew" + expected = IrisRelease.WhatsNewRsts( + latest=whatsnew_dir / "latest.rst", + release=whatsnew_dir / "9.1.rst", + index_=whatsnew_dir / "index.rst", + template=whatsnew_dir / "latest.rst.template", + ) + assert self.instance.whats_news == expected + + +class TestAnalyseRemotes: + """Tests for the :meth:`IrisRelease.analyse_remotes` method.""" + + @pytest.fixture(autouse=True) + def _setup(self, mock_git_remote_get_url, mock_git_remote_v) -> None: + self.instance = IrisRelease(_dry_run=True) + mock_git_remote_get_url.return_value = "git@github.com:myself/iris.git" + mock_git_remote_v.return_value = ( + "origin git@github.com:myself/iris.git (fetch)\n" + "origin git@github.com:myself/iris.git (push)\n" + "upstream git@github.com:SciTools/iris.git (fetch)\n" + "upstream no_push (push)\n" + "foo git@github.com:foo/iris.git (fetch)\n" + "foo git@github.com:foo/iris.git (push)\n" + ) + + def test_github_scitools(self, mocker): + # The 
input is irrelevant to this test, we just need a valid input to + # get past that line so we can test the line that sets github_scitools. + mock_inputs(mocker, "0") + self.instance.analyse_remotes() + assert self.instance.github_scitools == "upstream" + + def test_no_forks(self, mock_git_remote_v): + # The only remote is 'upstream', so error. + # (Also confirms that upstream has been successfully ignored). + mock_git_remote_v.return_value = ( + "upstream git@github.com:SciTools/iris.git (fetch)\n" + "upstream no_push (push)\n" + ) + with pytest.raises(AssertionError, match="^$"): + self.instance.analyse_remotes() + + def test_choose_fork(self, mocker): + # Developer chooses a fork other than `myself`. + mock_inputs(mocker, "1") + self.instance.analyse_remotes() + assert self.instance.github_fork == "foo" + + def test_choose_fork_invalid(self, mocker, mock_report_problem): + # Mock an invalid input followed by a valid one. + mock_inputs(mocker, "99", "1") + self.instance.analyse_remotes() + mock_report_problem.assert_called_once_with( + "Invalid number. Please try again ..." 
+ ) + + def test_derive_username(self, mocker): + mock_inputs(mocker, "0") + self.instance.analyse_remotes() + assert self.instance.github_user == "myself" + + def test_error_deriving_username(self, mocker, mock_git_remote_get_url): + mock_git_remote_get_url.return_value = "bad_url" + mock_inputs(mocker, "0") + with pytest.raises(RuntimeError, match="Error deriving GitHub username"): + self.instance.analyse_remotes() + + def test_default_fork_preserved(self, mocker): + self.instance.github_fork = "bar" + mock_inputs(mocker, "") + self.instance.analyse_remotes() + assert self.instance.github_fork == "bar" + + +class TestGetReleaseTag: + """Tests for the :meth:`IrisRelease.get_release_tag` method.""" + + @pytest.fixture(autouse=True) + def _setup(self, mock_git_ls_remote_tags) -> None: + self.instance = IrisRelease(_dry_run=True) + mock_git_ls_remote_tags.return_value = "abcd1234 refs/tags/1.0.0" + + def test_valid_tag(self, mocker): + # User inputs a valid, non-existing tag + mock_inputs(mocker, "v1.1.0") + self.instance.get_release_tag() + assert self.instance.git_tag == "v1.1.0" + + def test_existing_tag(self, mocker, mock_report_problem): + # User tries an existing tag, then provides a valid one + mock_inputs(mocker, "v1.0.0", "v1.1.0") + self.instance.get_release_tag() + mock_report_problem.assert_called_once_with( + "Version v1.0.0 already exists as a git tag. Please try again ..." 
+ ) + assert self.instance.git_tag == "v1.1.0" + + def test_invalid_version_format(self, mocker, mock_report_problem): + # User inputs invalid version format, then valid one + mock_inputs(mocker, "not-a-version", "v1.1.0") + self.instance.get_release_tag() + assert mock_report_problem.call_count == 1 + (call,) = mock_report_problem.call_args_list + (message,) = call.args + assert "Packaging error" in message + assert "Please try again" in message + assert self.instance.git_tag == "v1.1.0" + + def test_default_value_preserved(self, mocker): + # When loading from saved state, existing git_tag should be offered as default + self.instance.git_tag = "v1.1.0" + mock_inputs(mocker, "") # User accepts default + self.instance.get_release_tag() + assert self.instance.git_tag == "v1.1.0" + + +class TestGetAllPatches: + """Tests for the :meth:`IrisRelease.get_all_patches` method.""" + + @pytest.fixture(autouse=True) + def _setup(self, mock_git_ls_remote_tags) -> None: + self.instance = IrisRelease( + _dry_run=True, + git_tag="v1.1.1", + ) + mock_git_ls_remote_tags.return_value = ( + "abcd1234 refs/tags/v1.0.0\n" + "abcd1235 refs/tags/v1.0.1\n" + "abcd1237 refs/tags/v1.1.0rc1\n" + "abcd1239 refs/tags/v1.1.0\n" + "abcd1240 refs/tags/v1.2.0\n" + ) + + def test_not_patch_release(self): + # Non-PATCH releases skip this step + self.instance.git_tag = "v1.3.0" + self.instance.get_all_patches() + assert self.instance.patch_min_max_tag is None + + def test_patch_single_series(self, mocker): + # PATCH release, user doesn't want to patch multiple minor_series + mock_inputs(mocker, "1,1") + self.instance.get_all_patches() + assert self.instance.patch_min_max_tag == ("v1.1.1", "v1.1.1") + + def test_patch_multiple_series(self, mocker): + # User selects a range of minor_series to patch + mock_inputs(mocker, "1,2") + self.instance.get_all_patches() + assert self.instance.patch_min_max_tag == ("v1.1.1", "v1.2.1") + assert self.instance.git_tag == "v1.1.1" + + def test_invalid_format(self, 
mocker, mock_report_problem): + # User inputs invalid format, then valid input + mock_inputs(mocker, "not-numbers", "1,2") + self.instance.get_all_patches() + mock_report_problem.assert_called_once_with( + "Invalid input, expected two integers comma-separated. Please try again ..." + ) + assert self.instance.patch_min_max_tag == ("v1.1.1", "v1.2.1") + assert self.instance.git_tag == "v1.1.1" + + def test_invalid_numbers(self, mocker, mock_report_problem): + # User inputs out-of-range numbers, then valid input + mock_inputs(mocker, "99,100", "1,2") + self.instance.get_all_patches() + mock_report_problem.assert_called_once_with( + "Invalid numbers. Please try again ..." + ) + assert self.instance.patch_min_max_tag == ("v1.1.1", "v1.2.1") + assert self.instance.git_tag == "v1.1.1" + + def test_starts_with_earlier_patch(self, mocker, capfd): + # When patch_min is earlier than current git_tag, git_tag is updated + mock_inputs(mocker, "0,2") + self.instance.get_all_patches() + out, err = capfd.readouterr() + assert "Starting with v1.0.2. 
(v1.1.1 will be covered in sequence)" in out + assert self.instance.git_tag == "v1.0.2" + assert self.instance.patch_min_max_tag == ("v1.0.2", "v1.2.1") + + def test_default_value_preserved(self, mocker): + # When loading from saved state, existing patch_min_max_tag should work + self.instance.patch_min_max_tag = ("v1.0.2", "v1.2.1") + mock_inputs(mocker, "") + self.instance.get_all_patches() + assert self.instance.patch_min_max_tag == ("v1.0.2", "v1.2.1") + + +class TestApplyPatches: + """Tests for the :meth:`IrisRelease.apply_patches` method.""" + + @pytest.fixture(autouse=True) + def _setup(self, mock_wait_for_done) -> None: + self.instance = IrisRelease( + _dry_run=True, + git_tag="v1.1.1", + ) + self.mock_wait_for_done = mock_wait_for_done + + def get_wait_for_done_call(self) -> Any: + self.mock_wait_for_done.assert_called_once() + (call,) = self.mock_wait_for_done.call_args_list + return call + + def test_not_patch_release(self): + # Non-PATCH releases skip this step entirely. + self.instance.git_tag = "v1.2.0" + self.instance.apply_patches() + self.mock_wait_for_done.assert_not_called() + + def test_patch_branch_is_release_branch(self, mocker): + # User inputs the ideal branch - message confirms it is optimal. + mock_inputs(mocker, self.instance.version.branch) + self.instance.apply_patches() + call = self.get_wait_for_done_call() + branch = re.escape(self.instance.version.branch) + assert_input_msg_regex( + call, rf"patch change\(s\) are on the ideal branch.*{branch}.*" + ) + + def test_patch_branch_empty(self, mocker): + # User inputs nothing - message instructs them to create a PR. + mock_inputs(mocker, "") + self.instance.apply_patches() + call = self.get_wait_for_done_call() + branch = re.escape(self.instance.version.branch) + assert_input_msg_regex(call, rf"Propose the patch change\(s\).*{branch}.*") + + def test_patch_branch_other(self, mocker): + # User inputs a different branch - message warns about cherry-pick conflicts. 
+ mock_inputs(mocker, "some-other-branch") + self.instance.apply_patches() + call = self.get_wait_for_done_call() + branch = re.escape(self.instance.version.branch) + assert_input_msg_regex( + call, + rf"cherry-picking the patch change\(s\).*some-other-branch.*{branch}.*", + ) + + +class TestValidate: + """Tests for the :meth:`IrisRelease.validate` method.""" + + @pytest.fixture(autouse=True) + def _setup(self, mock_git_ls_remote_tags) -> None: + self.instance = IrisRelease( + _dry_run=True, github_user="user", patch_min_max_tag=("1.0.0", "1.1.0") + ) + mock_git_ls_remote_tags.return_value = "abcd1234 refs/tags/1.0.0" + + class Case(NamedTuple): + git_tag: str + match: str + + @pytest.fixture( + params=[ + pytest.param( + Case("1.1.dev0", "development release.*cannot handle"), + id="dev release", + ), + pytest.param( + Case("1.1.post0", "post release.*cannot handle"), + id="post release", + ), + pytest.param( + Case("1.1.alpha0", "release candidate.*got 'a'"), + id="pre-release non-rc", + ), + pytest.param( + Case( + "1.1.1rc0", "PATCH release AND a release candidate.*cannot handle" + ), + id="patch release rc", + ), + pytest.param( + Case("1.1.1", "No previous releases.*cannot handle a PATCH"), + id="first in series patch", + ), + ] + ) + def unhandled_cases(self, request) -> Case: + case = request.param + self.instance.git_tag = case.git_tag + return case + + def test_unhandled_cases(self, unhandled_cases): + case = unhandled_cases + with pytest.raises(RuntimeError, match=case.match): + self.instance.validate() + pass + + @pytest.fixture + def first_in_series_not_rc(self) -> None: + self.instance.git_tag = "1.1.0" + + def test_first_in_series_not_rc_message( + self, first_in_series_not_rc, capfd, mocker + ): + # Two "yes" answers to arrive at the appropriate decision node. 
+ mock_inputs(mocker, "y", "y") + self.instance.validate() + out, err = capfd.readouterr() + assert "No previous releases" in out + assert "expected to be a release candidate" in out + assert "sure you want to continue" in out + + def test_first_in_series_not_rc_exit(self, first_in_series_not_rc, mocker): + # One "no" answer to arrive at the appropriate decision node. + mock_inputs(mocker, "n") + with pytest.raises(SystemExit): + self.instance.validate() + + def test_first_in_series_not_rc_continue(self, first_in_series_not_rc, mocker): + # Two "yes" answers to arrive at the appropriate decision node. + mock_inputs(mocker, "y", "y") + self.instance.validate() + + # Not an exhaustive list, just the inverse of the unhandled cases. + @pytest.fixture( + params=[ + pytest.param("2.0.0rc0", id="major release RC"), + pytest.param("1.1.0rc0", id="minor release RC"), + pytest.param("1.1.0", id="minor release existing major"), + pytest.param("1.0.1", id="patch release existing minor"), + pytest.param("1.1.0", id="first in series not RC"), + ] + ) + def handled_cases(self, request) -> None: + self.instance.git_tag = request.param + + def test_handled_cases(self, handled_cases, mocker, mock_wait_for_done): + # One "yes" answer to arrive at the appropriate decision node. 
+ mock_inputs(mocker, "y") + self.instance.validate() + mock_wait_for_done.assert_called_once() + (call,) = mock_wait_for_done.call_args_list + assert_input_msg_regex(call, "Confirm that the details above are correct") + + +class TestReleaseHighlights: + """Tests for the :meth:`IrisRelease.release_highlights` method.""" + + @pytest.fixture(autouse=True) + def _setup(self, mock_wait_for_done, mock_git_ls_remote_tags) -> None: + self.instance = IrisRelease(_dry_run=True) + self.mock_wait_for_done = mock_wait_for_done + mock_git_ls_remote_tags.return_value = "abcd1234 refs/tags/v1.0.0\n" + + @pytest.mark.parametrize( + "first", [True, False], ids=["first in series", "not first in series"] + ) + def test_release_highlights(self, first): + if first: + git_tag = "v1.1.0" + else: + git_tag = "v1.0.1" + self.instance.git_tag = git_tag + + self.instance.release_highlights() + + if first: + self.mock_wait_for_done.assert_called_once() + (call,) = self.mock_wait_for_done.call_args_list + assert_input_msg_regex( + call, + "Assemble some bullet points summarising the highlights", + ) + else: + self.mock_wait_for_done.assert_not_called() + + +class TestUpdateStandardNames: + """Tests for the :meth:`IrisRelease.update_standard_names` method.""" + + @pytest.fixture(autouse=True) + def _setup(self, mock_wait_for_done, mock_git_ls_remote_tags) -> None: + self.instance = IrisRelease(_dry_run=True) + self.mock_wait_for_done = mock_wait_for_done + mock_git_ls_remote_tags.return_value = ( + "abcd1234 refs/tags/v1.0.0\nabcd1235 refs/tags/v1.0.1\n" + ) + + def test_not_first_in_series(self): + # Not first in series - method does nothing. + self.instance.git_tag = "v1.0.2" + self.instance.update_standard_names() + self.mock_wait_for_done.assert_not_called() + + def test_wait_messages(self): + # First in series. No other branching behaviour, so just a cursory check + # for the expected messages. 
+ self.instance.git_tag = "v1.1.0" + self.instance.update_standard_names() + assert self.mock_wait_for_done.call_count == 5 + delete, checkout, update, pr, merge = self.mock_wait_for_done.call_args_list + message_fragments = [ + (delete, "avoid a name clash by deleting any existing local branch"), + (checkout, "Checkout a local branch from the official"), + (update, "Update the CF standard names table"), + (pr, "Create a Pull Request for your changes"), + (merge, "Work with the development team to get the PR merged"), + ] + for call, expected in message_fragments: + assert_input_msg_regex(call, expected) + + +class TestCheckDeprecations: + """Tests for the :meth:`IrisRelease.check_deprecations` method.""" + + @pytest.fixture(autouse=True) + def _setup(self, mock_wait_for_done) -> None: + self.instance = IrisRelease(_dry_run=True) + self.mock_wait_for_done = mock_wait_for_done + + @pytest.mark.parametrize("git_tag", ["v1.1.0", "v1.1.1"]) + def test_not_major_release(self, git_tag): + # Not a MAJOR release - method does nothing. + self.instance.git_tag = git_tag + self.instance.check_deprecations() + self.mock_wait_for_done.assert_not_called() + + def test_major_release(self): + # MAJOR release - code block is active. 
+ self.instance.git_tag = "v1.0.0" + self.instance.check_deprecations() + self.mock_wait_for_done.assert_called_once() + (call,) = self.mock_wait_for_done.call_args_list + assert_input_msg_regex(call, "be sure to finalise all deprecations") + + +class TestCreateReleaseBranch: + """Tests for the :meth:`IrisRelease.create_release_branch` method.""" + + @pytest.fixture(autouse=True) + def _setup(self, mock_wait_for_done, mock_git_ls_remote_tags) -> None: + self.instance = IrisRelease(_dry_run=True) + self.mock_wait_for_done = mock_wait_for_done + mock_git_ls_remote_tags.return_value = ( + "abcd1234 refs/tags/v1.0.0\nabcd1235 refs/tags/v1.0.1\n" + ) + + def test_first_in_series(self): + self.instance.git_tag = "v1.1.0" + self.instance.create_release_branch() + self.mock_wait_for_done.assert_called_once() + (call,) = self.mock_wait_for_done.call_args_list + assert_input_msg_regex( + call, f"create the ``{self.instance.version.branch}`` release branch" + ) + + def test_not_first_in_series(self): + self.instance.git_tag = "v1.0.2" + self.instance.create_release_branch() + self.mock_wait_for_done.assert_called_once() + (call,) = self.mock_wait_for_done.call_args_list + assert_input_msg_regex( + call, + "If necessary: cherry-pick any specific commits that are needed", + ) + + +class TestFinaliseWhatsNew: + """Tests for the :meth:`IrisRelease.finalise_whats_new` method.""" + + class WaitMessages(enum.StrEnum): + DELETE = "avoid a name clash by deleting any existing local branch" + CHECKOUT = "Checkout a local branch from the official" + CUT = "'Cut' the What's New for the release" + REFS = r"Replace references to.*latest\.rst with.*{series}" + TITLE = r"set the page title to.*\nv{series}" + UNDERLINE = "ensure the page title underline is the exact same length" + DROPDOWN_HIGHLIGHT = r"set the sphinx-design dropdown title.*\nv{series}" + REFLECTION = "ensure it is a good reflection of what is new" + HIGHLIGHTS = "populate the Release Highlights dropdown" + DROPDOWN_PATCH = 
"Create a patch dropdown section" + TEMPLATE = "Remove the What's New template file" + PUSH = "Commit and push all the What's New changes" + PR = "Create a Pull Request for your changes" + MERGE = "Work with the development team to get the PR merged" + + @pytest.fixture(autouse=True) + def _setup(self, mock_wait_for_done, mock_git_ls_remote_tags) -> None: + self.instance = IrisRelease(_dry_run=True) + self.mock_wait_for_done = mock_wait_for_done + mock_git_ls_remote_tags.return_value = ( + "abcd1234 refs/tags/v1.0.0\n" + "abcd1235 refs/tags/v1.0.1\n" + "abcd1236 refs/tags/v1.1.0rc0\n" + ) + + def common_test(self, git_tag, expected_messages): + self.instance.git_tag = git_tag + self.instance.finalise_whats_new() + assert self.mock_wait_for_done.call_count == len(expected_messages) + for call, expected in zip( + self.mock_wait_for_done.call_args_list, + expected_messages, + ): + expected = expected.format( + series=re.escape(self.instance.version.minor_series[1:]) + ) + assert_input_msg_regex(call, expected) + + def test_first_in_series(self): + expected_messages = [ + self.WaitMessages.DELETE, + self.WaitMessages.CHECKOUT, + self.WaitMessages.CUT, + self.WaitMessages.REFS, + self.WaitMessages.TITLE, + self.WaitMessages.UNDERLINE, + self.WaitMessages.DROPDOWN_HIGHLIGHT, + self.WaitMessages.REFLECTION, + self.WaitMessages.HIGHLIGHTS, + self.WaitMessages.TEMPLATE, + self.WaitMessages.PUSH, + self.WaitMessages.PR, + self.WaitMessages.MERGE, + ] + self.common_test("v1.2.0", expected_messages) + + def test_minor_not_first(self): + expected_messages = [ + self.WaitMessages.DELETE, + self.WaitMessages.CHECKOUT, + self.WaitMessages.TITLE, + self.WaitMessages.UNDERLINE, + self.WaitMessages.DROPDOWN_HIGHLIGHT, + self.WaitMessages.REFLECTION, + self.WaitMessages.HIGHLIGHTS, + self.WaitMessages.PUSH, + self.WaitMessages.PR, + self.WaitMessages.MERGE, + ] + self.common_test("v1.1.0", expected_messages) + + def test_patch(self): + expected_messages = [ + self.WaitMessages.DELETE, 
+ self.WaitMessages.CHECKOUT, + self.WaitMessages.DROPDOWN_PATCH, + self.WaitMessages.PUSH, + self.WaitMessages.PR, + self.WaitMessages.MERGE, + ] + self.common_test("v1.0.2", expected_messages) + + +class TestCutRelease: + """Tests for the :meth:`IrisRelease.cut_release` method.""" + + class WaitMessages(enum.StrEnum): + WEBPAGE = "Visit https://github.com/SciTools/iris/releases/new" + TAG = "as the new tag to create, and also as the Release title" + TEXT = "Populate the main text box" + INSTALL_RC = "This is a release candidate - include the following instructions" + TICK_RC = "This is a release candidate - tick the box" + LATEST = "Tick the box to set this as the latest release" + NOT_LATEST = "Un-tick the latest release box." + PUBLISH = "Click: Publish release !" + URL = "Visit https://github.com/SciTools/iris/actions/workflows/ci-wheels.yml" + + @pytest.fixture(autouse=True) + def _setup(self, mock_wait_for_done, mock_git_ls_remote_tags) -> None: + self.instance = IrisRelease(_dry_run=True) + self.mock_wait_for_done = mock_wait_for_done + mock_git_ls_remote_tags.return_value = ( + "abcd1234 refs/tags/v1.0.0\n" + "abcd1235 refs/tags/v1.0.1\n" + "abcd1236 refs/tags/v1.1.0\n" + ) + + def common_test(self, git_tag, expected_messages): + self.instance.git_tag = git_tag + self.instance.cut_release() + assert self.mock_wait_for_done.call_count == len(expected_messages) + for call, expected in zip( + self.mock_wait_for_done.call_args_list, + expected_messages, + ): + assert_input_msg_regex(call, expected) + + def test_latest(self): + self.instance.git_tag = "v1.2.0" + expected_messages = [ + self.WaitMessages.WEBPAGE, + self.WaitMessages.TAG, + self.WaitMessages.TEXT, + self.WaitMessages.LATEST, + self.WaitMessages.PUBLISH, + self.WaitMessages.URL, + ] + self.common_test("v1.2.0", expected_messages) + + def test_not_latest(self): + expected_messages = [ + self.WaitMessages.WEBPAGE, + self.WaitMessages.TAG, + self.WaitMessages.TEXT, + self.WaitMessages.NOT_LATEST, + 
self.WaitMessages.PUBLISH, + self.WaitMessages.URL, + ] + self.common_test("v1.0.2", expected_messages) + + def test_release_candidate(self): + expected_messages = [ + self.WaitMessages.WEBPAGE, + self.WaitMessages.TAG, + self.WaitMessages.TEXT, + self.WaitMessages.INSTALL_RC, + self.WaitMessages.TICK_RC, + self.WaitMessages.PUBLISH, + self.WaitMessages.URL, + ] + self.common_test("v1.2.0rc0", expected_messages) + + +class TestCheckRtd: + """Tests for the :meth:`IrisRelease.check_rtd` method.""" + + @pytest.fixture(autouse=True) + def _setup(self, mock_wait_for_done, mock_git_ls_remote_tags) -> None: + self.instance = IrisRelease(_dry_run=True) + self.mock_wait_for_done = mock_wait_for_done + mock_git_ls_remote_tags.return_value = ( + "abcd1234 refs/tags/v1.0.0\n" + "abcd1235 refs/tags/v1.0.1\n" + "abcd1236 refs/tags/v1.1.0\n" + ) + + @pytest.mark.parametrize("latest", [True, False], ids=["is_latest", "not_latest"]) + @pytest.mark.parametrize("rc", [True, False], ids=["is_rc", "not_rc"]) + def test_default(self, latest: bool, rc: bool): + if latest: + git_tag = "v1.2.0" + else: + git_tag = "v1.0.2" + if rc: + git_tag += "rc0" + self.instance.git_tag = git_tag + self.instance.check_rtd() + series = re.escape(self.instance.version.minor_series) + expected_messages = [ + "Visit https://app.readthedocs.org/projects/scitools-iris/", + rf"{series}.* to Active, un-Hidden", + rf"{series}.* to Active, Hidden", + "Keep only the latest 2 branch doc builds active", + rf"{series}.* is available in RTD's version switcher", + rf"{series}.* is NOT available in RTD's version switcher", + ] + call_args_list = self.mock_wait_for_done.call_args_list + assert self.mock_wait_for_done.call_count == len(expected_messages) + for call, expected in zip(call_args_list, expected_messages): + assert_input_msg_regex(call, expected) + + (check_message,) = call_args_list[4][0] + check_expected = "Selecting 'stable' in the version switcher" + if latest and not rc: + assert check_expected in 
check_message + else: + assert check_expected not in check_message + + +class TestCheckPyPI: + """Tests for the :meth:`IrisRelease.check_pypi` method.""" + + class WaitMessages(enum.StrEnum): + URL = "Confirm that the following URL is correctly populated" + TOP = "{public} is at the top of this page" + PRE_RELEASE = "{public} is marked as a pre-release on this page" + TAG = "{public} is the tag shown on the scitools-iris PyPI homepage" + INSTALL = "Confirm that pip install works as expected" + + @pytest.fixture(autouse=True) + def _setup(self, mock_wait_for_done, mock_git_ls_remote_tags, mocker) -> None: + self.instance = IrisRelease(_dry_run=True) + self.mock_wait_for_done = mock_wait_for_done + # For the PyPI SHA256 input. + mock_inputs( + mocker, + "ccc8025d24b74d86ab780266cb9f708c468ac53426a45fab20bfc315c68383f7", + ) + mock_git_ls_remote_tags.return_value = ( + "abcd1234 refs/tags/v1.0.0\n" + "abcd1235 refs/tags/v1.0.1\n" + "abcd1236 refs/tags/v1.2.0\n" + ) + + def common_test(self, git_tag, expected_messages): + self.instance.git_tag = git_tag + self.instance.check_pypi() + assert self.mock_wait_for_done.call_count == len(expected_messages) + for call, expected in zip( + self.mock_wait_for_done.call_args_list, + expected_messages, + ): + expected = expected.format(public=re.escape(self.instance.version.short)) + assert_input_msg_regex(call, expected) + + def test_latest(self): + expected_messages = [ + self.WaitMessages.URL, + self.WaitMessages.TOP, + self.WaitMessages.TAG, + self.WaitMessages.INSTALL, + ] + self.common_test("v1.3.0", expected_messages) + + def test_not_latest(self): + expected_messages = [ + self.WaitMessages.URL, + self.WaitMessages.INSTALL, + ] + self.common_test("v1.0.2", expected_messages) + + def test_release_candidate(self): + expected_messages = [ + self.WaitMessages.URL, + self.WaitMessages.PRE_RELEASE, + self.WaitMessages.INSTALL, + ] + self.common_test("v1.1.0rc0", expected_messages) + + def test_latest_and_rc(self): + 
expected_messages = [ + self.WaitMessages.URL, + self.WaitMessages.TOP, + self.WaitMessages.PRE_RELEASE, + self.WaitMessages.INSTALL, + ] + self.common_test("v1.3.0rc0", expected_messages) + + def test_sha256_input(self, mocker, capfd): + self.instance.git_tag = "v1.3.0" + fake_sha = "3b2f4091883d1e401192b4f64aead9e4bbdb84854b74c984614d79742b2fab96" + mock_inputs(mocker, fake_sha) + self.instance.check_pypi() + out, err = capfd.readouterr() + assert "Visit the below to view the details" in out + assert self.instance.sha256 == fake_sha + + def test_invalid_sha(self, mocker, mock_report_problem): + self.instance.git_tag = "v1.3.0" + fake_sha = "3b2f4091883d1e401192b4f64aead9e4bbdb84854b74c984614d79742b2fab96" + mock_inputs(mocker, "not-a-sha", fake_sha) + self.instance.check_pypi() + mock_report_problem.assert_called_once_with( + "Invalid SHA256 hash. Please try again ..." + ) + assert self.instance.sha256 == fake_sha + + def test_sha_default_value_preserved(self, mocker): + self.instance.git_tag = "v1.3.0" + fake_sha = "3b2f4091883d1e401192b4f64aead9e4bbdb84854b74c984614d79742b2fab96" + self.instance.sha256 = fake_sha + mock_inputs(mocker, "") + self.instance.check_pypi() + assert self.instance.sha256 == fake_sha + + +class TestUpdateCondaForge: + """Tests for the :meth:`IrisRelease.update_conda_forge` method.""" + + class WaitMessages(enum.StrEnum): + FORK = "Make sure you have a GitHub fork of" + RC_BRANCHES = "Visit the conda-forge feedstock branches page" + # `rc-original` = just the value used in these tests + RC_ARCHIVE = "Archive the rc-original branch" + CHECKOUT = "Checkout a new branch for the conda-forge" + + UPDATE = re.escape("Update ./recipe/meta.yaml:") + r".*unsure\.$" + UPDATE_NOT_LATEST = ( + re.escape("Update ./recipe/meta.yaml:") + + r".*unsure\..*{version} is not the latest Iris release" + ) + + PUSH = "push up the changes to prepare for a Pull Request" + PR = "Create a Pull Request for your changes" + + AUTO = r"Follow the automatic conda-forge 
guidance.*Pull Request\.$" + AUTO_RC = r"Follow the automatic conda-forge guidance.*Pull Request\..*release candidate" + + MAINTAINERS = "Work with your fellow feedstock maintainers" + CI = "wait for the CI to complete" + LIST = r"Confirm that {public} appears in this list:" + LATEST = "is displayed on this page as the latest available" + PATCH = r"{version} is not the latest Iris release" + + @pytest.fixture(autouse=True) + def _setup(self, mock_wait_for_done, mock_git_ls_remote_tags) -> None: + self.instance = IrisRelease(_dry_run=True) + self.mock_wait_for_done = mock_wait_for_done + mock_git_ls_remote_tags.return_value = ( + "abcd1234 refs/tags/v1.0.0\n" + "abcd1235 refs/tags/v1.0.1\n" + "abcd1236 refs/tags/v1.1.0\n" + "abcd1237 refs/tags/v2.0.0\n" + ) + + @pytest.mark.parametrize("latest", [True, False], ids=["is_latest", "not_latest"]) + @pytest.mark.parametrize("rc", [True, False], ids=["is_rc", "not_rc"]) + @pytest.mark.parametrize( + "more_patches", [True, False], ids=["more_patches", "no_more_patches"] + ) + def test_waits(self, latest: bool, rc: bool, more_patches: bool, mocker): + if latest: + git_tag = "v2.1" + else: + git_tag = "v1.2" + if more_patches: + git_tag += ".1" + else: + git_tag += ".0" + if rc: + git_tag += "rc0" + self.instance.git_tag = git_tag + if more_patches: + self.instance.patch_min_max_tag = (git_tag, "v2.2.1") + + # All inputs relate to handling of the release candidate branch. We + # choose the inputs that allow exercising every wait message. 
+ mock_inputs(mocker, "rc-original", "y", "rc-new") + + expected_messages = list(self.WaitMessages) + if not rc: + expected_messages.remove(self.WaitMessages.RC_BRANCHES) + expected_messages.remove(self.WaitMessages.RC_ARCHIVE) + expected_messages.remove(self.WaitMessages.AUTO_RC) + else: + expected_messages.remove(self.WaitMessages.AUTO) + + if latest: + expected_messages.remove(self.WaitMessages.UPDATE_NOT_LATEST) + else: + expected_messages.remove(self.WaitMessages.UPDATE) + + if rc or not latest: + expected_messages.remove(self.WaitMessages.LATEST) + + if latest or more_patches: + expected_messages.remove(self.WaitMessages.PATCH) + + self.instance.update_conda_forge() + assert self.mock_wait_for_done.call_count == len(expected_messages) + for call, expected in zip( + self.mock_wait_for_done.call_args_list, + expected_messages, + ): + expected_str = expected.format( + public=re.escape(self.instance.version.short), + version=re.escape(str(self.instance.version)), + ) + assert_input_msg_regex(call, expected_str) + + def test_original_rc_branch_name(self, mocker): + self.instance.git_tag = "v2.1.0rc0" + mock_inputs(mocker, "my-special-rc-branch", "y", "rc-new") + self.instance.update_conda_forge() + wait_messages = [ + call.args[0] for call in self.mock_wait_for_done.call_args_list + ] + expected = self.WaitMessages.RC_ARCHIVE.replace( + "rc-original", "my-special-rc-branch" + ) + not_expected = self.WaitMessages.RC_ARCHIVE + assert any(re.search(expected, m) for m in wait_messages) + assert not any(re.search(not_expected, m) for m in wait_messages) + + @pytest.mark.parametrize("rc", [True, False], ids=["is_rc", "not_rc"]) + def test_new_rc_branch_name(self, rc, mocker): + git_tag = "v1.2.0" + if rc: + git_tag += "rc0" + self.instance.git_tag = git_tag + mock_inputs(mocker, "rc-original", "y", "rc-new") + self.instance.update_conda_forge() + all_calls = [call.args[0] for call in self.mock_wait_for_done.call_args_list] + calls = [ + call + for call in all_calls + if 
any( + phrase in call + for phrase in [ + "Checkout a new branch", + "Create a Pull Request", + "branch needs to be restored", + ] + ) + ] + expected = "rc-new" if rc else "main" + assert all(expected in c for c in calls) + + def test_young_rc_branch(self, mocker): + self.instance.git_tag = "v2.1.0rc0" + mock_inputs(mocker, "rc-original", "n") + self.instance.update_conda_forge() + wait_messages = [ + call.args[0] for call in self.mock_wait_for_done.call_args_list + ] + regex = re.compile(self.WaitMessages.RC_ARCHIVE) + assert all(regex.search(m) is None for m in wait_messages) + + def test_invalid_rc_branch_age(self, mocker, mock_report_problem): + self.instance.git_tag = "v2.1.0rc0" + # Invalid entry, then valid "n". + mock_inputs(mocker, "rc-original", "maybe", "n") + self.instance.update_conda_forge() + mock_report_problem.assert_called_once_with( + "Invalid entry. Please try again ..." + ) + + +class TestRevisitCondaForge: + """Tests for the :meth:`IrisRelease.revisit_conda_forge` method.""" + + @pytest.fixture(autouse=True) + def _setup(self, mock_wait_for_done, mock_git_ls_remote_tags) -> None: + self.instance = IrisRelease(_dry_run=True, git_tag="v1.2.0") + self.mock_wait_for_done = mock_wait_for_done + mock_git_ls_remote_tags.return_value = ( + "abcd1234 refs/tags/v1.0.0\n" + "abcd1235 refs/tags/v1.0.1\n" + "abcd1236 refs/tags/v1.1.0\n" + "abcd1237 refs/tags/v2.0.0\n" + ) + + def test_waits(self): + self.instance.revisit_conda_forge() + assert self.mock_wait_for_done.call_count == 2 + available, installable = self.mock_wait_for_done.call_args_list + message_fragments = [ + (available, "Confirm that the new release is available"), + ( + installable, + re.escape("Confirm that conda (or mamba) install works as expected"), + ), + ] + for call, expected in message_fragments: + assert_input_msg_regex(call, expected) + + @pytest.mark.parametrize("rc", [True, False], ids=["is_rc", "not_rc"]) + def test_channel_command(self, rc, mocker): + git_tag = "v1.2.0" + if 
rc: + git_tag += "rc0" + self.instance.git_tag = git_tag + mock_inputs(mocker, "rc-original", "n") + self.instance.revisit_conda_forge() + if rc: + assert any( + "label/rc_iris" in call.args[0] + for call in self.mock_wait_for_done.call_args_list + ) + else: + assert not any( + "label/rc_iris" in call.args[0] + for call in self.mock_wait_for_done.call_args_list + ) + + +class TestUpdateLinks: + """Tests for the :meth:`IrisRelease.update_links` method.""" + + @pytest.fixture(autouse=True) + def _setup(self, mock_wait_for_done) -> None: + self.instance = IrisRelease(_dry_run=True, git_tag="v1.2.0") + self.mock_wait_for_done = mock_wait_for_done + + def test_waits(self, mocker): + mock_inputs(mocker, "some-url") + self.instance.update_links() + assert self.mock_wait_for_done.call_count == 3 + revisit, update, comment = self.mock_wait_for_done.call_args_list + message_fragments = [ + (revisit, "Revisit the GitHub release:"), + (update, "Update .* with the above links and anything else appropriate"), + (comment, "notify anyone watching"), + ] + for call, expected in message_fragments: + assert_input_msg_regex(call, expected) + + def test_url_input(self, mocker, capfd): + mock_inputs(mocker, "some-url") + self.instance.update_links() + out, err = capfd.readouterr() + assert "What is the URL for the GitHub discussions page" in out + revisit, update, comment = self.mock_wait_for_done.call_args_list + assert_input_msg_regex(update, "some-url") + assert_input_msg_regex(comment, "some-url") + + +class TestBlueskyAnnounce: + """Tests for the :meth:`IrisRelease.bluesky_announce` method.""" + + @pytest.fixture(autouse=True) + def _setup(self, mock_wait_for_done, mock_git_ls_remote_tags) -> None: + self.instance = IrisRelease(_dry_run=True) + self.mock_wait_for_done = mock_wait_for_done + mock_git_ls_remote_tags.return_value = ( + "abcd1234 refs/tags/v1.0.0\nabcd1235 refs/tags/v1.0.1\n" + ) + + @pytest.mark.parametrize( + "first_in_series", [True, False], ids=["first_in_series", 
"not_first_in_series"] + ) + def test_wait(self, first_in_series: bool): + if first_in_series: + git_tag = "v1.1.0" + else: + git_tag = "v1.0.2" + self.instance.git_tag = git_tag + self.instance.bluesky_announce() + self.mock_wait_for_done.assert_called_once() + (call,) = self.mock_wait_for_done.call_args_list + assert_input_msg_regex(call, "Announce the release") + if not first_in_series: + assert_input_msg_regex(call, "Consider replying within an existing") + + +class TestMergeBack: + """Tests for the :meth:`IrisRelease.merge_back` method.""" + + class WaitMessages(enum.StrEnum): + DELETE = "avoid a name clash by deleting any existing local branch" + CHECKOUT = "Checkout a local branch from the official" + MERGE_IN = "Merge in the commits from {branch}" + TEMPLATE = "Recreate the What's New template" + LATEST = "Recreate the What's New latest" + GUIDANCE = r"Follow any guidance in .*latest\.rst" + INDEX = r"Add .*latest\.rst to the top of the list" + PUSH = "Commit and push all the What's New changes" + PR = "Create a Pull Request for your changes" + RISKY = "COMBINING BRANCHES CAN BE RISKY" + PR_MERGE = "Work with the development team to get the PR merged" + NEXT_PATCH = "Run the following command in a new terminal" + + @pytest.fixture(autouse=True) + def _setup(self, mock_wait_for_done, mock_git_ls_remote_tags): + self.instance = IrisRelease(_dry_run=True) + self.mock_wait_for_done = mock_wait_for_done + mock_git_ls_remote_tags.return_value = ( + "abcd1234 refs/tags/v1.0.0\n" + "abcd1235 refs/tags/v1.1.0\n" + "abcd1236 refs/tags/v1.2.0\n" + ) + + @pytest.mark.parametrize( + "first", [True, False], ids=["first_in_series", "not_first_in_series"] + ) + @pytest.mark.parametrize( + "more_patches", [True, False], ids=["more_patches", "no_more_patches"] + ) + def test_waits(self, first, more_patches): + if first and more_patches: + pytest.skip( + "first_in_series and more_patches are mutually exclusive in reality." 
+ ) + if first: + git_tag = "v1.3.0" + else: + git_tag = "v1.0.1" + self.instance.git_tag = git_tag + if more_patches: + self.instance.patch_min_max_tag = (git_tag, "v1.2.1") + + expected_messages = list(self.WaitMessages) + if not first: + expected_messages.remove(self.WaitMessages.TEMPLATE) + expected_messages.remove(self.WaitMessages.LATEST) + expected_messages.remove(self.WaitMessages.GUIDANCE) + expected_messages.remove(self.WaitMessages.INDEX) + expected_messages.remove(self.WaitMessages.PUSH) + if not more_patches: + expected_messages.remove(self.WaitMessages.NEXT_PATCH) + + self.instance.merge_back() + assert self.mock_wait_for_done.call_count == len(expected_messages) + for call, expected in zip( + self.mock_wait_for_done.call_args_list, + expected_messages, + ): + expected = expected.format(branch=re.escape(self.instance.version.branch)) + assert_input_msg_regex(call, expected) + + @pytest.mark.parametrize( + "more_patches", [True, False], ids=["more_patches", "no_more_patches"] + ) + def test_branches(self, more_patches): + self.instance.git_tag = "v1.0.1" + if more_patches: + self.instance.patch_min_max_tag = ("v1.0.1", "v1.2.1") + target_branch = "v1.1.x" + working_branch = "v1.0.1-to-v1.1.x" + else: + target_branch = "main" + working_branch = "v1.0.x.mergeback" + + self.instance.merge_back() + wait_messages = [ + call.args[0] for call in self.mock_wait_for_done.call_args_list + ] + # Use CHECKOUT as the test since it contains target_ and working_branch. 
+ (checkout_message,) = [ + m for m in wait_messages if re.search(self.WaitMessages.CHECKOUT, m) + ] + pattern = re.compile(rf"git checkout .*{target_branch} -b {working_branch}") + assert pattern.search(checkout_message) is not None + + def test_next_series_error(self, mocker): + self.instance.git_tag = "v1.0.1" + self.instance.patch_min_max_tag = ("v1.0.1", "v1.2.1") + _ = mocker.patch.object( + IrisRelease, + "_get_tagged_versions", + return_value=[IrisVersion("v1.0.0")], + ) + with pytest.raises(RuntimeError, match="Error finding next minor_series"): + self.instance.merge_back() + + def test_next_patch_file(self): + self.instance.git_tag = "v1.0.1" + self.instance.patch_min_max_tag = ("v1.0.1", "v1.2.1") + expected_file = self.instance._get_file_stem().with_name("v1_1_1.json") + self.instance.merge_back() + assert expected_file.exists() + next_patch = IrisRelease.load(expected_file, dry_run=True) + assert ( + next_patch.latest_complete_step + == IrisRelease.get_steps().index(IrisRelease.validate) - 1 + ) + assert next_patch.git_tag == "v1.1.1" + assert next_patch.patch_min_max == ( + IrisVersion("v1.0.1"), + IrisVersion("v1.2.1"), + ) + + +class TestNextRelease: + """Tests for the :meth:`IrisRelease.next_release` method.""" + + @pytest.fixture(autouse=True) + def _setup(self, mock_wait_for_done) -> None: + self.instance = IrisRelease(_dry_run=True) + self.mock_wait_for_done = mock_wait_for_done + + @pytest.mark.parametrize("patch", [True, False], ids=["patch", "not_patch"]) + @pytest.mark.parametrize("rc", [True, False], ids=["rc", "not_rc"]) + def test_waits(self, patch: bool, rc: bool): + if patch: + git_tag = "v1.1.1" + else: + git_tag = "v1.2.0" + if rc: + git_tag += "rc0" + self.instance.git_tag = git_tag + self.instance.next_release() + if not patch and not rc: + assert self.mock_wait_for_done.call_count == 5 + manager, milestone, discussion, sprints, champion = ( + self.mock_wait_for_done.call_args_list + ) + message_fragments = [ + (manager, "Confirm 
that there is a release manager"), + (milestone, "has set up a milestone for their release"), + (discussion, "has set up a discussion page for their release"), + (sprints, "has arranged some team development time"), + (champion, "importance of regularly championing their release"), + ] + for call, expected in message_fragments: + assert_input_msg_regex(call, expected) + else: + self.mock_wait_for_done.assert_not_called() diff --git a/tools/update_lockfiles.py b/tools/update_lockfiles.py index a81ab8cafc..dd25649e3e 100755 --- a/tools/update_lockfiles.py +++ b/tools/update_lockfiles.py @@ -1,8 +1,10 @@ +#!/usr/bin/env python3 # Copyright Iris contributors # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" +"""Command line utility for generating conda-lock files for Iris' dependencies. + A command line utility for generating conda-lock files for the environments that nox uses for testing each different supported version of python. Typical usage: @@ -18,7 +20,6 @@ import sys from warnings import warn - message = ( "Iris' large requirements may require Mamba to successfully solve. 
If you " "don't want to install Mamba, consider using the workflow_dispatch on " @@ -37,10 +38,10 @@ "Iris Lockfile Generator", ) -parser.add_argument('files', nargs='+', - help="List of environment.yml files to lock") -parser.add_argument('--output-dir', '-o', default='.', - help="Directory to save output lock files") +parser.add_argument("files", nargs="+", help="List of environment.yml files to lock") +parser.add_argument( + "--output-dir", "-o", default=".", help="Directory to save output lock files" +) args = parser.parse_args() @@ -48,21 +49,29 @@ print(f"generating lockfile for {infile}", file=sys.stderr) fname = Path(infile).name - ftype = fname.split('.')[-1] - if ftype.lower() in ('yaml', 'yml'): - fname = '.'.join(fname.split('.')[:-1]) + ftype = fname.split(".")[-1] + if ftype.lower() in ("yaml", "yml"): + fname = ".".join(fname.split(".")[:-1]) # conda-lock --filename-template expects a string with a "...{platform}..." # placeholder in it, so we have to build the .lock filename without # using .format - ofile_template = Path(args.output_dir) / (fname+'-{platform}.lock') - subprocess.call([ - 'conda-lock', - 'lock', - '--filename-template', ofile_template, - '--file', infile, - '-k', 'explicit', - '--platform', 'linux-64' - ]) - print(f"lockfile saved to {ofile_template}".format(platform='linux-64'), - file=sys.stderr) + ofile_template = Path(args.output_dir) / (fname + "-{platform}.lock") + subprocess.call( + [ + "conda-lock", + "lock", + "--filename-template", + ofile_template, + "--file", + infile, + "-k", + "explicit", + "--platform", + "linux-64", + ] + ) + print( + f"lockfile saved to {ofile_template}".format(platform="linux-64"), + file=sys.stderr, + )