diff --git a/src/semantic_release/cli/config.py b/src/semantic_release/cli/config.py index 1d2057a48..37b86a811 100644 --- a/src/semantic_release/cli/config.py +++ b/src/semantic_release/cli/config.py @@ -39,6 +39,7 @@ from semantic_release.commit_parser import ( AngularCommitParser, CommitParser, + ConventionalCommitMonorepoParser, ConventionalCommitParser, EmojiCommitParser, ParseResult, @@ -71,9 +72,10 @@ class HvcsClient(str, Enum): GITEA = "gitea" -_known_commit_parsers: Dict[str, type[CommitParser]] = { - "conventional": ConventionalCommitParser, +_known_commit_parsers: dict[str, type[CommitParser[Any, Any]]] = { "angular": AngularCommitParser, + "conventional": ConventionalCommitParser, + "conventional-monorepo": ConventionalCommitMonorepoParser, "emoji": EmojiCommitParser, "scipy": ScipyCommitParser, "tag": TagCommitParser, diff --git a/src/semantic_release/commit_parser/__init__.py b/src/semantic_release/commit_parser/__init__.py index 740f4ae7f..15a96c176 100644 --- a/src/semantic_release/commit_parser/__init__.py +++ b/src/semantic_release/commit_parser/__init__.py @@ -7,6 +7,8 @@ AngularParserOptions, ) from semantic_release.commit_parser.conventional import ( + ConventionalCommitMonorepoParser, + ConventionalCommitMonorepoParserOptions, ConventionalCommitParser, ConventionalCommitParserOptions, ) @@ -28,3 +30,24 @@ ParseResult, ParseResultType, ) + +__all__ = [ + "CommitParser", + "ParserOptions", + "AngularCommitParser", + "AngularParserOptions", + "ConventionalCommitParser", + "ConventionalCommitParserOptions", + "ConventionalCommitMonorepoParser", + "ConventionalCommitMonorepoParserOptions", + "EmojiCommitParser", + "EmojiParserOptions", + "ScipyCommitParser", + "ScipyParserOptions", + "TagCommitParser", + "TagParserOptions", + "ParsedCommit", + "ParseError", + "ParseResult", + "ParseResultType", +] diff --git a/src/semantic_release/commit_parser/conventional/__init__.py b/src/semantic_release/commit_parser/conventional/__init__.py new file mode 
100644 index 000000000..dd7d57d63 --- /dev/null +++ b/src/semantic_release/commit_parser/conventional/__init__.py @@ -0,0 +1,17 @@ +from semantic_release.commit_parser.conventional.options import ( + ConventionalCommitParserOptions, +) +from semantic_release.commit_parser.conventional.options_monorepo import ( + ConventionalCommitMonorepoParserOptions, +) +from semantic_release.commit_parser.conventional.parser import ConventionalCommitParser +from semantic_release.commit_parser.conventional.parser_monorepo import ( + ConventionalCommitMonorepoParser, +) + +__all__ = [ + "ConventionalCommitParser", + "ConventionalCommitParserOptions", + "ConventionalCommitMonorepoParser", + "ConventionalCommitMonorepoParserOptions", +] diff --git a/src/semantic_release/commit_parser/conventional/options.py b/src/semantic_release/commit_parser/conventional/options.py new file mode 100644 index 000000000..6bdb9739c --- /dev/null +++ b/src/semantic_release/commit_parser/conventional/options.py @@ -0,0 +1,72 @@ +from __future__ import annotations + +from itertools import zip_longest +from typing import Tuple + +from pydantic.dataclasses import dataclass + +from semantic_release.commit_parser._base import ParserOptions +from semantic_release.enums import LevelBump + + +@dataclass +class ConventionalCommitParserOptions(ParserOptions): + """Options dataclass for the ConventionalCommitParser.""" + + minor_tags: Tuple[str, ...] = ("feat",) + """Commit-type prefixes that should result in a minor release bump.""" + + patch_tags: Tuple[str, ...] = ("fix", "perf") + """Commit-type prefixes that should result in a patch release bump.""" + + other_allowed_tags: Tuple[str, ...] = ( + "build", + "chore", + "ci", + "docs", + "style", + "refactor", + "test", + ) + """Commit-type prefixes that are allowed but do not result in a version bump.""" + + allowed_tags: Tuple[str, ...] = ( + *minor_tags, + *patch_tags, + *other_allowed_tags, + ) + """ + All commit-type prefixes that are allowed. 
+ + These are used to identify a valid commit message. If a commit message does not start with + one of these prefixes, it will not be considered a valid commit message. + """ + + default_bump_level: LevelBump = LevelBump.NO_RELEASE + """The minimum bump level to apply to valid commit message.""" + + parse_squash_commits: bool = True + """Toggle flag for whether or not to parse squash commits""" + + ignore_merge_commits: bool = True + """Toggle flag for whether or not to ignore merge commits""" + + @property + def tag_to_level(self) -> dict[str, LevelBump]: + """A mapping of commit tags to the level bump they should result in.""" + return self._tag_to_level + + def __post_init__(self) -> None: + self._tag_to_level: dict[str, LevelBump] = { + str(tag): level + for tag, level in [ + # we have to do a type ignore as zip_longest provides a type that is not specific enough + # for our expected output. Due to the empty second array, we know the first is always longest + # and that means no values in the first entry of the tuples will ever be a LevelBump. We + # apply a str() to make mypy happy although it will never happen. 
+ *zip_longest(self.allowed_tags, (), fillvalue=self.default_bump_level), + *zip_longest(self.patch_tags, (), fillvalue=LevelBump.PATCH), + *zip_longest(self.minor_tags, (), fillvalue=LevelBump.MINOR), + ] + if "|" not in str(tag) + } diff --git a/src/semantic_release/commit_parser/conventional/options_monorepo.py b/src/semantic_release/commit_parser/conventional/options_monorepo.py new file mode 100644 index 000000000..58bcaf47d --- /dev/null +++ b/src/semantic_release/commit_parser/conventional/options_monorepo.py @@ -0,0 +1,90 @@ +from __future__ import annotations + +from pathlib import Path +from re import compile as regexp, error as RegExpError # noqa: N812 +from typing import TYPE_CHECKING, Any, Iterable, Tuple + +from pydantic import Field, field_validator +from pydantic.dataclasses import dataclass + +# typing_extensions is for Python 3.8, 3.9, 3.10 compatibility +from typing_extensions import Annotated + +from semantic_release.commit_parser.conventional.options import ( + ConventionalCommitParserOptions, +) + +if TYPE_CHECKING: # pragma: no cover + pass + + +@dataclass +class ConventionalCommitMonorepoParserOptions(ConventionalCommitParserOptions): + # TODO: add example into the docstring + """Options dataclass for ConventionalCommitMonorepoParser.""" + + path_filters: Annotated[Tuple[str, ...], Field(validate_default=True)] = (".",) + """ + A set of relative paths to filter commits by. Only commits with file changes that + match these file paths or its subdirectories will be considered valid commits. + + Syntax is similar to .gitignore with file path globs and inverse file match globs + via `!` prefix. Paths should be relative to the current working directory. + """ + + scope_prefix: str = "" + """ + A prefix that will be striped from the scope when parsing commit messages. + + If set, it will cause unscoped commits to be ignored. Use this in tandem with + the `path_filters` option to filter commits by directory and scope. 
This will + be fed into a regular expression so you must escape any special characters that + are meaningful in regular expressions (e.g. `.`, `*`, `?`, `+`, etc.) if you want + to match them literally. + """ + + @classmethod + @field_validator("path_filters", mode="before") + def convert_strs_to_paths(cls, value: Any) -> tuple[Path, ...]: + values = value if isinstance(value, Iterable) else [value] + results: list[Path] = [] + + for val in values: + if isinstance(val, (str, Path)): + results.append(Path(val)) + continue + + raise TypeError(f"Invalid type: {type(val)}, expected str or Path.") + + return tuple(results) + + @classmethod + @field_validator("path_filters", mode="after") + def resolve_path(cls, dir_paths: tuple[Path, ...]) -> tuple[Path, ...]: + return tuple( + ( + Path(f"!{Path(str_path[1:]).expanduser().absolute().resolve()}") + # maintains the negation prefix if it exists + if (str_path := str(path)).startswith("!") + # otherwise, resolve the path normally + else path.expanduser().absolute().resolve() + ) + for path in dir_paths + ) + + @classmethod + @field_validator("scope_prefix", mode="after") + def validate_scope_prefix(cls, scope_prefix: str) -> str: + if not scope_prefix: + return "" + + # Allow the special case of a plain wildcard although it's not a valid regex + if scope_prefix == "*": + return ".*" + + try: + regexp(scope_prefix) + except RegExpError as err: + raise ValueError(f"Invalid regex {scope_prefix!r}") from err + + return scope_prefix diff --git a/src/semantic_release/commit_parser/conventional.py b/src/semantic_release/commit_parser/conventional/parser.py similarity index 74% rename from src/semantic_release/commit_parser/conventional.py rename to src/semantic_release/commit_parser/conventional/parser.py index 3cd50d9c7..5cab34c56 100644 --- a/src/semantic_release/commit_parser/conventional.py +++ b/src/semantic_release/commit_parser/conventional/parser.py @@ -1,16 +1,25 @@ from __future__ import annotations -import re from 
functools import reduce -from itertools import zip_longest -from re import compile as regexp +from logging import getLogger +from re import ( + DOTALL, + IGNORECASE, + MULTILINE, + Match as RegexMatch, + Pattern, + compile as regexp, + error as RegexError, # noqa: N812 +) from textwrap import dedent -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING, ClassVar from git.objects.commit import Commit -from pydantic.dataclasses import dataclass -from semantic_release.commit_parser._base import CommitParser, ParserOptions +from semantic_release.commit_parser._base import CommitParser +from semantic_release.commit_parser.conventional.options import ( + ConventionalCommitParserOptions, +) from semantic_release.commit_parser.token import ( ParsedCommit, ParsedMessageResult, @@ -25,16 +34,10 @@ ) from semantic_release.enums import LevelBump from semantic_release.errors import InvalidParserOptions -from semantic_release.globals import logger from semantic_release.helpers import sort_numerically, text_reducer -if TYPE_CHECKING: # pragma: no cover - from git.objects.commit import Commit - - -def _logged_parse_error(commit: Commit, error: str) -> ParseError: - logger.debug(error) - return ParseError(commit, error=error) +if TYPE_CHECKING: + pass # TODO: Remove from here, allow for user customization instead via options @@ -53,69 +56,6 @@ def _logged_parse_error(commit: Commit, error: str) -> ParseError: } -@dataclass -class ConventionalCommitParserOptions(ParserOptions): - """Options dataclass for the ConventionalCommitParser.""" - - minor_tags: Tuple[str, ...] = ("feat",) - """Commit-type prefixes that should result in a minor release bump.""" - - patch_tags: Tuple[str, ...] = ("fix", "perf") - """Commit-type prefixes that should result in a patch release bump.""" - - other_allowed_tags: Tuple[str, ...] 
= ( - "build", - "chore", - "ci", - "docs", - "style", - "refactor", - "test", - ) - """Commit-type prefixes that are allowed but do not result in a version bump.""" - - allowed_tags: Tuple[str, ...] = ( - *minor_tags, - *patch_tags, - *other_allowed_tags, - ) - """ - All commit-type prefixes that are allowed. - - These are used to identify a valid commit message. If a commit message does not start with - one of these prefixes, it will not be considered a valid commit message. - """ - - default_bump_level: LevelBump = LevelBump.NO_RELEASE - """The minimum bump level to apply to valid commit message.""" - - parse_squash_commits: bool = True - """Toggle flag for whether or not to parse squash commits""" - - ignore_merge_commits: bool = True - """Toggle flag for whether or not to ignore merge commits""" - - @property - def tag_to_level(self) -> dict[str, LevelBump]: - """A mapping of commit tags to the level bump they should result in.""" - return self._tag_to_level - - def __post_init__(self) -> None: - self._tag_to_level: dict[str, LevelBump] = { - str(tag): level - for tag, level in [ - # we have to do a type ignore as zip_longest provides a type that is not specific enough - # for our expected output. Due to the empty second array, we know the first is always longest - # and that means no values in the first entry of the tuples will ever be a LevelBump. We - # apply a str() to make mypy happy although it will never happen. 
- *zip_longest(self.allowed_tags, (), fillvalue=self.default_bump_level), - *zip_longest(self.patch_tags, (), fillvalue=LevelBump.PATCH), - *zip_longest(self.minor_tags, (), fillvalue=LevelBump.MINOR), - ] - if "|" not in str(tag) - } - - class ConventionalCommitParser( CommitParser[ParseResult, ConventionalCommitParserOptions] ): @@ -128,14 +68,57 @@ class ConventionalCommitParser( # TODO: Deprecate in lieu of get_default_options() parser_options = ConventionalCommitParserOptions + # GitHub & Gitea use (#123), GitLab uses (!123), and BitBucket uses (pull request #123) + mr_selector = regexp(r"[\t ]+\((?:pull request )?(?P[#!]\d+)\)[\t ]*$") + + issue_selector = regexp( + str.join( + "", + [ + r"^(?:clos(?:e|es|ed|ing)|fix(?:es|ed|ing)?|resolv(?:e|es|ed|ing)|implement(?:s|ed|ing)?):", + r"[\t ]+(?P.+)[\t ]*$", + ], + ), + flags=MULTILINE | IGNORECASE, + ) + + notice_selector = regexp(r"^NOTICE: (?P.+)$") + + common_commit_msg_filters: ClassVar[dict[str, tuple[Pattern[str], str]]] = { + "typo-extra-spaces": (regexp(r"(\S) +(\S)"), r"\1 \2"), + "git-header-commit": ( + regexp(r"^[\t ]*commit [0-9a-f]+$\n?", flags=MULTILINE), + "", + ), + "git-header-author": ( + regexp(r"^[\t ]*Author: .+$\n?", flags=MULTILINE), + "", + ), + "git-header-date": ( + regexp(r"^[\t ]*Date: .+$\n?", flags=MULTILINE), + "", + ), + "git-squash-heading": ( + regexp( + r"^[\t ]*Squashed commit of the following:.*$\n?", + flags=MULTILINE, + ), + "", + ), + } + def __init__(self, options: ConventionalCommitParserOptions | None = None) -> None: super().__init__(options) + self._logger = getLogger( + str.join(".", [self.__module__, self.__class__.__name__]) + ) + try: commit_type_pattern = regexp( r"(?P%s)" % str.join("|", self.options.allowed_tags) ) - except re.error as err: + except RegexError as err: raise InvalidParserOptions( str.join( "\n", @@ -167,45 +150,11 @@ def __init__(self, options: ConventionalCommitParserOptions | None = None) -> No r"(?:\n\n(?P.+))?", # commit body ], ), - 
flags=re.DOTALL, + flags=DOTALL, ) - # GitHub & Gitea use (#123), GitLab uses (!123), and BitBucket uses (pull request #123) - self.mr_selector = regexp( - r"[\t ]+\((?:pull request )?(?P[#!]\d+)\)[\t ]*$" - ) - self.issue_selector = regexp( - str.join( - "", - [ - r"^(?:clos(?:e|es|ed|ing)|fix(?:es|ed|ing)?|resolv(?:e|es|ed|ing)|implement(?:s|ed|ing)?):", - r"[\t ]+(?P.+)[\t ]*$", - ], - ), - flags=re.MULTILINE | re.IGNORECASE, - ) - self.notice_selector = regexp(r"^NOTICE: (?P.+)$") - self.filters = { - "typo-extra-spaces": (regexp(r"(\S) +(\S)"), r"\1 \2"), - "git-header-commit": ( - regexp(r"^[\t ]*commit [0-9a-f]+$\n?", flags=re.MULTILINE), - "", - ), - "git-header-author": ( - regexp(r"^[\t ]*Author: .+$\n?", flags=re.MULTILINE), - "", - ), - "git-header-date": ( - regexp(r"^[\t ]*Date: .+$\n?", flags=re.MULTILINE), - "", - ), - "git-squash-heading": ( - regexp( - r"^[\t ]*Squashed commit of the following:.*$\n?", - flags=re.MULTILINE, - ), - "", - ), + self.filters: dict[str, tuple[Pattern[str], str]] = { + **self.common_commit_msg_filters, "git-squash-commit-prefix": ( regexp( str.join( @@ -215,17 +164,20 @@ def __init__(self, options: ConventionalCommitParserOptions | None = None) -> No commit_type_pattern.pattern + r"\b", # prior to commit type ], ), - flags=re.MULTILINE, + flags=MULTILINE, ), # move commit type to the start of the line r"\1", ), } - @staticmethod - def get_default_options() -> ConventionalCommitParserOptions: + def get_default_options(self) -> ConventionalCommitParserOptions: return ConventionalCommitParserOptions() + def log_parse_error(self, commit: Commit, error: str) -> ParseError: + self._logger.debug(error) + return ParseError(commit, error=error) + def commit_body_components_separator( self, accumulator: dict[str, list[str]], text: str ) -> dict[str, list[str]]: @@ -267,14 +219,20 @@ def commit_body_components_separator( return accumulator def parse_message(self, message: str) -> ParsedMessageResult | None: - if not (parsed := 
self.commit_msg_pattern.match(message)): - return None + return ( + self.create_parsed_message_result(match) + if (match := self.commit_msg_pattern.match(message)) + else None + ) - parsed_break = parsed.group("break") - parsed_scope = parsed.group("scope") or "" - parsed_subject = parsed.group("subject") - parsed_text = parsed.group("text") - parsed_type = parsed.group("type") + def create_parsed_message_result( + self, match: RegexMatch[str] + ) -> ParsedMessageResult: + parsed_break = match.group("break") + parsed_scope = match.group("scope") or "" + parsed_subject = match.group("subject") + parsed_text = match.group("text") + parsed_type = match.group("type") linked_merge_request = "" if mr_match := self.mr_selector.search(parsed_subject): @@ -322,7 +280,7 @@ def is_merge_commit(commit: Commit) -> bool: def parse_commit(self, commit: Commit) -> ParseResult: if not (parsed_msg_result := self.parse_message(force_str(commit.message))): - return _logged_parse_error( + return self.log_parse_error( commit, f"Unable to parse commit message: {commit.message!r}", ) @@ -342,7 +300,7 @@ def parse(self, commit: Commit) -> ParseResult | list[ParseResult]: will be returned as a list of a single ParseResult. 
""" if self.options.ignore_merge_commits and self.is_merge_commit(commit): - return _logged_parse_error( + return self.log_parse_error( commit, "Ignoring merge commit: %s" % commit.hexsha[:8] ) diff --git a/src/semantic_release/commit_parser/conventional/parser_monorepo.py b/src/semantic_release/commit_parser/conventional/parser_monorepo.py new file mode 100644 index 000000000..ad0053399 --- /dev/null +++ b/src/semantic_release/commit_parser/conventional/parser_monorepo.py @@ -0,0 +1,469 @@ +from __future__ import annotations + +import os +from fnmatch import fnmatch +from logging import getLogger +from pathlib import Path, PurePath, PurePosixPath, PureWindowsPath +from re import DOTALL, compile as regexp, error as RegexError # noqa: N812 +from typing import TYPE_CHECKING + +from semantic_release.commit_parser._base import CommitParser +from semantic_release.commit_parser.conventional.options import ( + ConventionalCommitParserOptions, +) +from semantic_release.commit_parser.conventional.options_monorepo import ( + ConventionalCommitMonorepoParserOptions, +) +from semantic_release.commit_parser.conventional.parser import ConventionalCommitParser +from semantic_release.commit_parser.token import ( + ParsedCommit, + ParsedMessageResult, + ParseError, + ParseResult, +) +from semantic_release.commit_parser.util import force_str +from semantic_release.errors import InvalidParserOptions + +if TYPE_CHECKING: # pragma: no cover + from git.objects.commit import Commit + + +class ConventionalCommitMonorepoParser( + CommitParser[ParseResult, ConventionalCommitMonorepoParserOptions] +): + # TODO: Remove for v11 compatibility, get_default_options() will be called instead + parser_options = ConventionalCommitMonorepoParserOptions + + def __init__( + self, options: ConventionalCommitMonorepoParserOptions | None = None + ) -> None: + super().__init__(options) + + try: + commit_scope_pattern = regexp( + r"\(" + self.options.scope_prefix + r"(?P[^\n]+)?\)", + ) + except RegexError 
as err: + raise InvalidParserOptions( + str.join( + "\n", + [ + f"Invalid options for {self.__class__.__name__}", + "Unable to create regular expression from configured scope_prefix.", + "Please check the configured scope_prefix and remove or escape any regular expression characters.", + ], + ) + ) from err + + try: + commit_type_pattern = regexp( + r"(?P%s)" % str.join("|", self.options.allowed_tags) + ) + except RegexError as err: + raise InvalidParserOptions( + str.join( + "\n", + [ + f"Invalid options for {self.__class__.__name__}", + "Unable to create regular expression from configured commit-types.", + "Please check the configured commit-types and remove or escape any regular expression characters.", + ], + ) + ) from err + + # This regular expression includes scope prefix into the pattern and forces a scope to be present + # PSR will match the full scope but we don't include it in the scope match, + # which implicitly strips it from being included in the returned scope. + self._strict_scope_pattern = regexp( + str.join( + "", + [ + r"^" + commit_type_pattern.pattern, + commit_scope_pattern.pattern, + r"(?P!)?:\s+", + r"(?P[^\n]+)", + r"(?:\n\n(?P.+))?", # commit body + ], + ), + flags=DOTALL, + ) + + self._optional_scope_pattern = regexp( + str.join( + "", + [ + r"^" + commit_type_pattern.pattern, + r"(?:\((?P[^\n]+)\))?", + r"(?P!)?:\s+", + r"(?P[^\n]+)", + r"(?:\n\n(?P.+))?", # commit body + ], + ), + flags=DOTALL, + ) + + file_select_filters, file_ignore_filters = self._process_path_filter_options( + self.options.path_filters + ) + self._file_selection_filters: list[str] = file_select_filters + self._file_ignore_filters: list[str] = file_ignore_filters + + self._logger = getLogger( + str.join(".", [self.__module__, self.__class__.__name__]) + ) + + self._base_parser = ConventionalCommitParser( + options=ConventionalCommitParserOptions( + **{ + k: getattr(self.options, k) + for k in ConventionalCommitParserOptions().__dataclass_fields__ + } + ) + ) + + def 
get_default_options(self) -> ConventionalCommitMonorepoParserOptions: + return ConventionalCommitMonorepoParserOptions() + + @staticmethod + def _process_path_filter_options( # noqa: C901 + path_filters: tuple[str, ...], + ) -> tuple[list[str], list[str]]: + file_ignore_filters: list[str] = [] + file_selection_filters: list[str] = [] + unique_selection_filters: set[str] = set() + unique_ignore_filters: set[str] = set() + + for str_path in path_filters: + str_filter = str_path[1:] if str_path.startswith("!") else str_path + filter_list = ( + file_ignore_filters + if str_path.startswith("!") + else file_selection_filters + ) + unique_cache = ( + unique_ignore_filters + if str_path.startswith("!") + else unique_selection_filters + ) + + # Since fnmatch is not too flexible, we will expand the path filters to include the name and any subdirectories + # as this is how gitignore is interpreted. Possible scenarios: + # | Input | Path Normalization | Filter List | + # | ---------- | ------------------ | ------------------------- | + # | / | / | /** | done + # | /./ | / | /** | done + # | /** | /** | /** | done + # | /./** | /** | /** | done + # | /* | /* | /* | done + # | . | . | ./** | done + # | ./ | . | ./** | done + # | ././ | . | ./** | done + # | ./** | ./** | ./** | done + # | ./* | ./* | ./* | done + # | .. | .. | ../** | done + # | ../ | .. | ../** | done + # | ../** | ../** | ../** | done + # | ../* | ../* | ../* | done + # | ../.. | ../.. 
| ../../** | done + # | ../../ | ../../ | ../../** | done + # | ../../docs | ../../docs | ../../docs, ../../docs/** | done + # | src | src | src, src/** | done + # | src/ | src | src/** | done + # | src/* | src/* | src/* | done + # | src/** | src/** | src/** | done + # | /src | /src | /src, /src/** | done + # | /src/ | /src | /src/** | done + # | /src/** | /src/** | /src/** | done + # | /src/* | /src/* | /src/* | done + # | ../d/f.txt | ../d/f.txt | ../d/f.txt, ../d/f.txt/** | done + # This expansion will occur regardless of the negation prefix + + os_path: PurePath | PurePosixPath | PureWindowsPath = PurePath(str_filter) + + if r"\\" in str_filter: + # Windows paths were given so we convert them to posix paths + os_path = PureWindowsPath(str_filter) + os_path = ( + PureWindowsPath( + os_path.root, *os_path.parts[1:] + ) # drop any drive letter + if os_path.is_absolute() + else os_path + ) + os_path = PurePosixPath(os_path.as_posix()) + + path_normalized = str(os_path) + if path_normalized == str( + Path(".").absolute().root + ) or path_normalized == str(Path("/**")): + path_normalized = "/**" + + elif path_normalized == str(Path("/*")): + pass + + elif path_normalized == str(Path(".")) or path_normalized == str( + Path("./**") + ): + path_normalized = "./**" + + elif path_normalized == str(Path("./*")): + path_normalized = "./*" + + elif path_normalized == str(Path("..")) or path_normalized == str( + Path("../**") + ): + path_normalized = "../**" + + elif path_normalized == str(Path("../*")): + path_normalized = "../*" + + elif path_normalized.endswith(("..", "../**")): + path_normalized = f"{path_normalized.rstrip('*')}/**" + + elif str_filter.endswith(os.sep): + # If the path ends with a separator, it is a directory, so we add the directory and all subdirectories + path_normalized = f"{path_normalized}/**" + + elif not path_normalized.endswith("*"): + all_subdirs = f"{path_normalized}/**" + if all_subdirs not in unique_cache: + unique_cache.add(all_subdirs) + 
filter_list.append(all_subdirs) + # And fall through to add the path as is + + # END IF + + # Add the normalized path to the filter list if it is not already present + if path_normalized not in unique_cache: + unique_cache.add(path_normalized) + filter_list.append(path_normalized) + + return file_selection_filters, file_ignore_filters + + def logged_parse_error(self, commit: Commit, error: str) -> ParseError: + self._logger.debug(error) + return ParseError(commit, error=error) + + def parse(self, commit: Commit) -> ParseResult | list[ParseResult]: + if self.options.ignore_merge_commits and self._base_parser.is_merge_commit( + commit + ): + return self._base_parser.log_parse_error( + commit, "Ignoring merge commit: %s" % commit.hexsha[:8] + ) + + separate_commits: list[Commit] = ( + self._base_parser.unsquash_commit(commit) + if self.options.parse_squash_commits + else [commit] + ) + + # Parse each commit individually if there were more than one + parsed_commits: list[ParseResult] = list( + map(self.parse_commit, separate_commits) + ) + + def add_linked_merge_request( + parsed_result: ParseResult, mr_number: str + ) -> ParseResult: + return ( + parsed_result + if not isinstance(parsed_result, ParsedCommit) + else ParsedCommit( + **{ + **parsed_result._asdict(), + "linked_merge_request": mr_number, + } + ) + ) + + # TODO: improve this for other VCS systems other than GitHub & BitBucket + # Github works as the first commit in a squash merge commit has the PR number + # appended to the first line of the commit message + lead_commit = next(iter(parsed_commits)) + + if isinstance(lead_commit, ParsedCommit) and lead_commit.linked_merge_request: + # If the first commit has linked merge requests, assume all commits + # are part of the same PR and add the linked merge requests to all + # parsed commits + parsed_commits = [ + lead_commit, + *map( + lambda parsed_result, mr=lead_commit.linked_merge_request: ( # type: ignore[misc] + add_linked_merge_request(parsed_result, mr) + 
), + parsed_commits[1:], + ), + ] + + elif isinstance(lead_commit, ParseError) and ( + mr_match := self._base_parser.mr_selector.search( + force_str(lead_commit.message) + ) + ): + # Handle BitBucket Squash Merge Commits (see #1085), which have non angular commit + # format but include the PR number in the commit subject that we want to extract + linked_merge_request = mr_match.group("mr_number") + + # apply the linked MR to all commits + parsed_commits = [ + add_linked_merge_request(parsed_result, linked_merge_request) + for parsed_result in parsed_commits + ] + + return parsed_commits + + def parse_message( + self, message: str, strict_scope: bool = False + ) -> ParsedMessageResult | None: + if ( + not (parsed_match := self._strict_scope_pattern.match(message)) + and strict_scope + ): + return None + + if not parsed_match and not ( + parsed_match := self._optional_scope_pattern.match(message) + ): + return None + + return self._base_parser.create_parsed_message_result(parsed_match) + + def parse_commit(self, commit: Commit) -> ParseResult: + """Attempt to parse the commit message with a regular expression into a ParseResult.""" + # Multiple scenarios to consider when parsing a commit message [Truth table]: + # ======================================================================================================= + # | || INPUTS || | + # | # ||------------------------+----------------+--------------|| Result | + # | || Example Commit Message | Relevant Files | Scope Prefix || | + # |----||------------------------+----------------+--------------||-------------------------------------| + # | 1 || type(prefix-cli): msg | yes | "prefix-" || ParsedCommit | + # | 2 || type(prefix-cli): msg | yes | "" || ParsedCommit | + # | 3 || type(prefix-cli): msg | no | "prefix-" || ParsedCommit | + # | 4 || type(prefix-cli): msg | no | "" || ParseError[No files] | + # | 5 || type(scope-cli): msg | yes | "prefix-" || ParsedCommit | + # | 6 || type(scope-cli): msg | yes | "" || 
ParsedCommit | + # | 7 || type(scope-cli): msg | no | "prefix-" || ParseError[No files & wrong scope] | + # | 8 || type(scope-cli): msg | no | "" || ParseError[No files] | + # | 9 || type(cli): msg | yes | "prefix-" || ParsedCommit | + # | 10 || type(cli): msg | yes | "" || ParsedCommit | + # | 11 || type(cli): msg | no | "prefix-" || ParseError[No files & wrong scope] | + # | 12 || type(cli): msg | no | "" || ParseError[No files] | + # | 13 || type: msg | yes | "prefix-" || ParsedCommit | + # | 14 || type: msg | yes | "" || ParsedCommit | + # | 15 || type: msg | no | "prefix-" || ParseError[No files & wrong scope] | + # | 16 || type: msg | no | "" || ParseError[No files] | + # | 17 || non-conventional msg | yes | "prefix-" || ParseError[Invalid Syntax] | + # | 18 || non-conventional msg | yes | "" || ParseError[Invalid Syntax] | + # | 19 || non-conventional msg | no | "prefix-" || ParseError[Invalid Syntax] | + # | 20 || non-conventional msg | no | "" || ParseError[Invalid Syntax] | + # ======================================================================================================= + + # Initial Logic Flow: + # [1] When there are no relevant files and a scope prefix is defined, we enforce a strict scope + # [2] When there are no relevant files and no scope prefix is defined, we parse scoped or unscoped commits + # [3] When there are relevant files, we parse scoped or unscoped commits regardless of any defined prefix + has_relevant_changed_files = self._has_relevant_changed_files(commit) + strict_scope = bool( + not has_relevant_changed_files and self.options.scope_prefix + ) + pmsg_result = self.parse_message( + message=force_str(commit.message), + strict_scope=strict_scope, + ) + + if pmsg_result and (has_relevant_changed_files or strict_scope): + self._logger.debug( + "commit %s introduces a %s level_bump", + commit.hexsha[:8], + pmsg_result.bump, + ) + + return ParsedCommit.from_parsed_message_result(commit, pmsg_result) + + if pmsg_result and not 
has_relevant_changed_files: + return self.logged_parse_error( + commit, + f"Commit {commit.hexsha[:7]} has no changed files matching the path filter(s)", + ) + + if strict_scope and self.parse_message(str(commit.message), strict_scope=False): + return self.logged_parse_error( + commit, + str.join( + " and ", + [ + f"Commit {commit.hexsha[:7]} has no changed files matching the path filter(s)", + f"the scope does not match scope prefix '{self.options.scope_prefix}'", + ], + ), + ) + + return self.logged_parse_error( + commit, + f"Format Mismatch! Unable to parse commit message: {commit.message!r}", + ) + + def unsquash_commit_message(self, message: str) -> list[str]: + return self._base_parser.unsquash_commit_message(message) + + def _has_relevant_changed_files(self, commit: Commit) -> bool: + # Extract git root from commit + git_root = ( + Path(commit.repo.working_tree_dir or commit.repo.working_dir) + .absolute() + .resolve() + ) + + rel_cwd = ( + Path.cwd().relative_to(git_root) + if Path.cwd().is_relative_to(git_root) + else Path(".") + ) + + sandboxed_selection_filters: list[str] = [ + str(file_filter) + for file_filter in ( + ( + git_root / select_filter.rstrip("/") + if Path(select_filter).is_absolute() + else git_root / rel_cwd / select_filter + ) + for select_filter in self._file_selection_filters + ) + if file_filter.is_relative_to(git_root) + ] + + sandboxed_ignore_filters: list[str] = [ + str(file_filter) + for file_filter in ( + ( + git_root / ignore_filter.rstrip("/") + if Path(ignore_filter).is_absolute() + else git_root / rel_cwd / ignore_filter + ) + for ignore_filter in self._file_ignore_filters + ) + if file_filter.is_relative_to(git_root) + ] + + # Check if the changed files of the commit that match the path filters + for full_path in iter( + str(git_root / rel_git_path) for rel_git_path in commit.stats.files + ): + # Check if the filepath matches any of the file selection filters + if not any( + fnmatch(full_path, select_filter) + for 
select_filter in sandboxed_selection_filters + ): + continue + + # Pass filter matches, so now evaluate if it is supposed to be ignored + if not any( + fnmatch(full_path, ignore_filter) + for ignore_filter in sandboxed_ignore_filters + ): + # No ignore filter matched, so it must be a relevant file + return True + + return False diff --git a/src/semantic_release/helpers.py b/src/semantic_release/helpers.py index c50369575..f6bae05ea 100644 --- a/src/semantic_release/helpers.py +++ b/src/semantic_release/helpers.py @@ -8,7 +8,7 @@ from functools import lru_cache, reduce, wraps from pathlib import Path, PurePosixPath from re import IGNORECASE, compile as regexp -from typing import TYPE_CHECKING, Any, Callable, NamedTuple, Sequence, TypeVar +from typing import TYPE_CHECKING, Callable, NamedTuple, TypeVar from urllib.parse import urlsplit from semantic_release.globals import logger @@ -16,7 +16,7 @@ if TYPE_CHECKING: # pragma: no cover from logging import Logger from re import Pattern - from typing import Iterable + from typing import Any, Iterable, Sequence number_pattern = regexp(r"(?P\S*?)(?P\d[\d,]*)\b") @@ -94,7 +94,7 @@ def text_reducer(text: str, filter_pair: tuple[Pattern[str], str]) -> str: def validate_types_in_sequence( - sequence: Sequence, types: type | tuple[type, ...] + sequence: Sequence[Any], types: type | tuple[type, ...] ) -> bool: """Validate that all elements in a sequence are of a specific type""" return all(isinstance(item, types) for item in sequence) diff --git a/src/semantic_release/version/algorithm.py b/src/semantic_release/version/algorithm.py index fa24e3fa1..0057cfcce 100644 --- a/src/semantic_release/version/algorithm.py +++ b/src/semantic_release/version/algorithm.py @@ -346,24 +346,7 @@ def next_version( # Step 5. apply the parser to each commit in the history (could return multiple results per commit) parsed_results = list(map(commit_parser.parse, commits_since_last_release)) - # Step 5A. 
Validation type check for the parser results (important because of possible custom parsers) - for parsed_result in parsed_results: - if not any( - ( - isinstance(parsed_result, (ParseError, ParsedCommit)), - type(parsed_result) == list - and validate_types_in_sequence( - parsed_result, (ParseError, ParsedCommit) - ), - type(parsed_result) == tuple - and validate_types_in_sequence( - parsed_result, (ParseError, ParsedCommit) - ), - ) - ): - raise TypeError("Unexpected type returned from commit_parser.parse") - - # Step 5B. Accumulate all parsed results into a single list accounting for possible multiple results per commit + # Step 5A. Accumulate all parsed results into a single list accounting for possible multiple results per commit consolidated_results: list[ParseResult] = reduce( lambda accumulated_results, p_results: [ *accumulated_results, @@ -378,6 +361,10 @@ def next_version( [], ) + # Step 5B. Validation type check for the parser results (important because of possible custom parsers) + if not validate_types_in_sequence(consolidated_results, (ParseError, ParsedCommit)): + raise TypeError("Unexpected type returned from commit_parser.parse") + # Step 5C. 
Parse the commits to determine the bump level that should be applied parsed_levels: set[LevelBump] = { parsed_result.bump # type: ignore[union-attr] # too complex for type checkers diff --git a/tests/conftest.py b/tests/conftest.py index 2d081f62b..880f845e8 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -17,6 +17,8 @@ from filelock import FileLock from git import Commit, Repo +from semantic_release.version.version import Version + from tests.const import PROJ_DIR from tests.fixtures import * from tests.util import copy_dir_tree, remove_dir_tree @@ -335,6 +337,10 @@ def set_cached_repo_data(request: pytest.FixtureRequest) -> SetCachedRepoDataFn: def magic_serializer(obj: Any) -> Any: if isinstance(obj, Path): return obj.__fspath__() + + if isinstance(obj, Version): + return obj.__dict__ + return obj def _set_cached_repo_data(proj_dirname: str, data: RepoData) -> None: diff --git a/tests/const.py b/tests/const.py index c7cc0a8b4..69a7ca778 100644 --- a/tests/const.py +++ b/tests/const.py @@ -11,6 +11,9 @@ class RepoActionStep(str, Enum): CONFIGURE = "CONFIGURE" + CONFIGURE_MONOREPO = "CONFIGURE_MONOREPO" + CREATE_MONOREPO = "CREATE_MONOREPO" + CHANGE_DIRECTORY = "CHANGE_DIRECTORY" WRITE_CHANGELOGS = "WRITE_CHANGELOGS" GIT_CHECKOUT = "GIT_CHECKOUT" GIT_COMMIT = "GIT_COMMIT" diff --git a/tests/e2e/cmd_changelog/test_changelog.py b/tests/e2e/cmd_changelog/test_changelog.py index edc2a8c63..3f3bb56da 100644 --- a/tests/e2e/cmd_changelog/test_changelog.py +++ b/tests/e2e/cmd_changelog/test_changelog.py @@ -77,6 +77,12 @@ from requests_mock import Mocker + from semantic_release.commit_parser.conventional.parser import ( + ConventionalCommitParser, + ) + from semantic_release.commit_parser.emoji import EmojiCommitParser + from semantic_release.commit_parser.scipy import ScipyCommitParser + from tests.conftest import RunCliFn from tests.e2e.conftest import RetrieveRuntimeContextFn from tests.fixtures.example_project import ( @@ -867,9 +873,12 @@ def 
test_changelog_update_mode_unreleased_n_released( commit_n_rtn_changelog_entry: CommitNReturnChangelogEntryFn, changelog_file: Path, insertion_flag: str, - get_commit_def_of_conventional_commit: GetCommitDefFn, - get_commit_def_of_emoji_commit: GetCommitDefFn, - get_commit_def_of_scipy_commit: GetCommitDefFn, + get_commit_def_of_conventional_commit: GetCommitDefFn[ConventionalCommitParser], + get_commit_def_of_emoji_commit: GetCommitDefFn[EmojiCommitParser], + get_commit_def_of_scipy_commit: GetCommitDefFn[ScipyCommitParser], + default_conventional_parser: ConventionalCommitParser, + default_emoji_parser: EmojiCommitParser, + default_scipy_parser: ScipyCommitParser, ): """ Given there are unreleased changes and a previous release in the changelog, @@ -890,18 +899,23 @@ def test_changelog_update_mode_unreleased_n_released( commit_n_section: Commit2Section = { "conventional": { "commit": get_commit_def_of_conventional_commit( - "perf: improve the performance of the application" + "perf: improve the performance of the application", + parser=default_conventional_parser, ), "section": "Performance Improvements", }, "emoji": { "commit": get_commit_def_of_emoji_commit( - ":zap: improve the performance of the application" + ":zap: improve the performance of the application", + parser=default_emoji_parser, ), "section": ":zap:", }, "scipy": { - "commit": get_commit_def_of_scipy_commit("MAINT: fix an issue"), + "commit": get_commit_def_of_scipy_commit( + "MAINT: fix an issue", + parser=default_scipy_parser, + ), "section": "Fix", }, } diff --git a/tests/e2e/cmd_version/bump_version/conftest.py b/tests/e2e/cmd_version/bump_version/conftest.py index da36ff1d2..71863876b 100644 --- a/tests/e2e/cmd_version/bump_version/conftest.py +++ b/tests/e2e/cmd_version/bump_version/conftest.py @@ -12,19 +12,29 @@ if TYPE_CHECKING: from pathlib import Path - from typing import Protocol + from typing import Protocol, Sequence from click.testing import Result from tests.conftest import 
@pytest.fixture(scope="session")
def init_mirror_repo_for_rebuild(
    build_repo_from_definition: BuildRepoFromDefinitionFn,
) -> InitMirrorRepo4RebuildFn:
    """Session fixture: factory that initializes a mirror repository for a
    release-by-release rebuild test.

    The returned callable creates ``mirror_repo_dir``, replays the given
    configuration steps into it, and then ``git rm``'s each requested file so
    the rebuild starts from a clean slate (e.g. removing default changelogs
    to enable changelog Update Mode).
    """

    def _init_mirror_repo_for_rebuild(
        mirror_repo_dir: Path,
        configuration_steps: Sequence[
            RepoActionConfigure | RepoActionCreateMonorepo | RepoActionConfigureMonorepo
        ],
        files_to_remove: Sequence[Path],
    ) -> Path:
        # Create the mirror repo directory
        mirror_repo_dir.mkdir(exist_ok=True, parents=True)

        # Initialize mirror repository
        build_repo_from_definition(
            dest_dir=mirror_repo_dir,
            repo_construction_steps=configuration_steps,
        )

        with Repo(mirror_repo_dir) as mirror_git_repo:
            for filepath in files_to_remove:
                # NOTE: working_dir is a str; `str / Path` resolves via
                # PurePath.__rtruediv__, yielding a Path rooted at the repo.
                file = (
                    (mirror_git_repo.working_dir / filepath).resolve().absolute()
                    if not filepath.is_absolute()
                    else filepath
                )
                # Silently skip paths outside the repo or already absent —
                # removal is best-effort by design.
                if (
                    not file.is_relative_to(mirror_git_repo.working_dir)
                    or not file.exists()
                ):
                    continue

                mirror_git_repo.git.rm(str(file), force=True)

        return mirror_repo_dir

    return _init_mirror_repo_for_rebuild
@pytest.mark.parametrize(
    "repo_fixture_name",
    [
        pytest.param(repo_fixture_name, marks=pytest.mark.comprehensive)
        for repo_fixture_name in [
            monorepo_w_github_flow_w_default_release_channel_conventional_commits.__name__,
        ]
    ],
)
def test_githubflow_monorepo_rebuild_1_channel(
    repo_fixture_name: str,
    run_psr_release: RunPSReleaseFn,
    build_monorepo_w_github_flow_w_default_release_channel: BuildSpecificRepoFn,
    split_repo_actions_by_release_tags: SplitRepoActionsByReleaseTagsFn,
    init_mirror_repo_for_rebuild: InitMirrorRepo4RebuildFn,
    example_project_dir: ExProjectDir,
    git_repo_for_directory: GetGitRepo4DirFn,
    build_repo_from_definition: BuildRepoFromDefinitionFn,
    mocked_git_push: MagicMock,
    post_mocker: Mocker,
    get_sanitized_md_changelog_content: GetSanitizedChangelogContentFn,
    get_sanitized_rst_changelog_content: GetSanitizedChangelogContentFn,
    monorepo_pkg1_pyproject_toml_file: Path,
    monorepo_pkg2_pyproject_toml_file: Path,
    monorepo_pkg1_version_py_file: Path,
    monorepo_pkg2_version_py_file: Path,
    monorepo_pkg1_changelog_md_file: Path,
    monorepo_pkg2_changelog_md_file: Path,
    monorepo_pkg1_changelog_rst_file: Path,
    monorepo_pkg2_changelog_rst_file: Path,
):
    """Rebuild a single-channel GitHub-Flow monorepo release-by-release and
    verify that running python-semantic-release reproduces the target
    repository exactly at every tagged release: per-package pyproject files,
    version files, MD/RST changelogs, the release commit message, the tag,
    and the expected push / VCS-release side effects.
    """
    # build target repo into a temporary directory
    target_repo_dir = example_project_dir / repo_fixture_name
    # the fixture name encodes the commit convention as its second-to-last
    # underscore-separated token before "commits"
    commit_type = cast(
        "CommitConvention", repo_fixture_name.split("commits", 1)[0].split("_")[-2]
    )
    target_repo_definition = build_monorepo_w_github_flow_w_default_release_channel(
        repo_name=repo_fixture_name,
        commit_type=commit_type,
        dest_dir=target_repo_dir,
    )
    target_git_repo = git_repo_for_directory(target_repo_dir)

    # split repo actions by release actions
    releasetags_2_steps: dict[
        Version | Literal["Unreleased"] | None, list[RepoActions]
    ] = split_repo_actions_by_release_tags(target_repo_definition)
    # the None key holds the pre-release configuration steps
    configuration_steps = cast(
        "Sequence[RepoActionConfigure | RepoActionCreateMonorepo | RepoActionConfigureMonorepo]",
        releasetags_2_steps.pop(None),
    )
    release_versions_2_steps = cast(
        "dict[Version | Literal['Unreleased'], list[RepoActions]]",
        releasetags_2_steps,
    )

    # Create the mirror repo directory
    mirror_repo_dir = init_mirror_repo_for_rebuild(
        mirror_repo_dir=(example_project_dir / "mirror"),
        configuration_steps=configuration_steps,
        files_to_remove=[],
    )
    mirror_git_repo = git_repo_for_directory(mirror_repo_dir)

    # rebuild repo from scratch stopping before each release tag
    for curr_release_key, steps in release_versions_2_steps.items():
        curr_release_str = (
            curr_release_key.as_tag()
            if isinstance(curr_release_key, Version)
            else curr_release_key
        )

        # make sure mocks are clear
        mocked_git_push.reset_mock()
        post_mocker.reset_mock()

        # Extract expected result from target repo
        if curr_release_str != "Unreleased":
            target_git_repo.git.checkout(curr_release_str, detach=True, force=True)

        expected_pkg1_md_changelog_content = get_sanitized_md_changelog_content(
            repo_dir=target_repo_dir, changelog_file=monorepo_pkg1_changelog_md_file
        )
        expected_pkg2_md_changelog_content = get_sanitized_md_changelog_content(
            repo_dir=target_repo_dir, changelog_file=monorepo_pkg2_changelog_md_file
        )
        expected_pkg1_rst_changelog_content = get_sanitized_rst_changelog_content(
            repo_dir=target_repo_dir, changelog_file=monorepo_pkg1_changelog_rst_file
        )
        expected_pkg2_rst_changelog_content = get_sanitized_rst_changelog_content(
            repo_dir=target_repo_dir, changelog_file=monorepo_pkg2_changelog_rst_file
        )
        expected_pkg1_pyproject_toml_content = (
            target_repo_dir / monorepo_pkg1_pyproject_toml_file
        ).read_text()
        expected_pkg2_pyproject_toml_content = (
            target_repo_dir / monorepo_pkg2_pyproject_toml_file
        ).read_text()
        expected_pkg1_version_file_content = (
            target_repo_dir / monorepo_pkg1_version_py_file
        ).read_text()
        expected_pkg2_version_file_content = (
            target_repo_dir / monorepo_pkg2_version_py_file
        ).read_text()
        expected_release_commit_text = target_git_repo.head.commit.message

        # In our repo env, start building the repo from the definition
        build_repo_from_definition(
            dest_dir=mirror_repo_dir,
            # stop before the release step
            repo_construction_steps=steps[
                : -1 if curr_release_str != "Unreleased" else None
            ],
        )

        # Find the directory the release should run from: the last
        # CHANGE_DIRECTORY step wins, clamped to the mirror repo.
        release_directory = mirror_repo_dir

        for step in steps[::-1]:  # reverse order
            if step["action"] == RepoActionStep.CHANGE_DIRECTORY:
                # NOTE(review): compares against Path.root (e.g. "/") as the
                # sentinel for "repo root" — confirm this is intentional
                release_directory = (
                    mirror_repo_dir
                    if str(Path(step["details"]["directory"]))
                    == str(mirror_repo_dir.root)
                    else Path(step["details"]["directory"])
                )

                release_directory = (
                    mirror_repo_dir / release_directory
                    if not release_directory.is_absolute()
                    else release_directory
                )

                # never escape the mirror repository
                if not release_directory.is_relative_to(mirror_repo_dir):
                    release_directory = mirror_repo_dir

                break

        # Act: run PSR on the repo instead of the RELEASE step
        if curr_release_str != "Unreleased":
            release_action_step = cast("RepoActionRelease", steps[-1])

            with freeze_time(
                release_action_step["details"]["datetime"]
            ), temporary_working_directory(release_directory):
                run_psr_release(
                    next_version_str=release_action_step["details"]["version"],
                    git_repo=mirror_git_repo,
                    config_toml_path=Path("pyproject.toml"),
                )
        else:
            # run psr changelog command to validate changelog
            pass

        # take measurement after running the version command
        actual_release_commit_text = mirror_git_repo.head.commit.message
        actual_pkg1_pyproject_toml_content = (
            mirror_repo_dir / monorepo_pkg1_pyproject_toml_file
        ).read_text()
        actual_pkg2_pyproject_toml_content = (
            mirror_repo_dir / monorepo_pkg2_pyproject_toml_file
        ).read_text()
        actual_pkg1_version_file_content = (
            mirror_repo_dir / monorepo_pkg1_version_py_file
        ).read_text()
        actual_pkg2_version_file_content = (
            mirror_repo_dir / monorepo_pkg2_version_py_file
        ).read_text()
        actual_pkg1_md_changelog_content = get_sanitized_md_changelog_content(
            repo_dir=mirror_repo_dir, changelog_file=monorepo_pkg1_changelog_md_file
        )
        actual_pkg2_md_changelog_content = get_sanitized_md_changelog_content(
            repo_dir=mirror_repo_dir, changelog_file=monorepo_pkg2_changelog_md_file
        )
        actual_pkg1_rst_changelog_content = get_sanitized_rst_changelog_content(
            repo_dir=mirror_repo_dir, changelog_file=monorepo_pkg1_changelog_rst_file
        )
        actual_pkg2_rst_changelog_content = get_sanitized_rst_changelog_content(
            repo_dir=mirror_repo_dir, changelog_file=monorepo_pkg2_changelog_rst_file
        )

        # Evaluate (normal release actions should have occurred as expected)
        # ------------------------------------------------------------------
        # Make sure version file is updated
        assert (
            expected_pkg1_pyproject_toml_content == actual_pkg1_pyproject_toml_content
        )
        assert (
            expected_pkg2_pyproject_toml_content == actual_pkg2_pyproject_toml_content
        )
        assert expected_pkg1_version_file_content == actual_pkg1_version_file_content
        assert expected_pkg2_version_file_content == actual_pkg2_version_file_content

        # Make sure changelog is updated
        assert expected_pkg1_md_changelog_content == actual_pkg1_md_changelog_content
        assert expected_pkg2_md_changelog_content == actual_pkg2_md_changelog_content
        assert expected_pkg1_rst_changelog_content == actual_pkg1_rst_changelog_content
        assert expected_pkg2_rst_changelog_content == actual_pkg2_rst_changelog_content

        # Make sure commit is created
        assert expected_release_commit_text == actual_release_commit_text

        if curr_release_str != "Unreleased":
            # Make sure tag is created
            assert curr_release_str in [tag.name for tag in mirror_git_repo.tags]

            # Make sure publishing actions occurred
            assert mocked_git_push.call_count == 2  # 1 for commit, 1 for tag
            assert post_mocker.call_count == 1  # vcs release creation occurred
@@ -81,7 +82,7 @@ def config_path(example_project_dir: ExProjectDir) -> Path: return example_project_dir / DEFAULT_CONFIG_FILE -@pytest.fixture +@pytest.fixture(scope="session") def read_config_file() -> ReadConfigFileFn: def _read_config_file(file: Path | str) -> RawConfig: config_text = load_raw_config_file(file) @@ -136,12 +137,12 @@ def _strip_logging_messages(log: str) -> str: @pytest.fixture(scope="session") -def long_hash_pattern() -> Pattern: +def long_hash_pattern() -> Pattern[str]: return regexp(r"\b([0-9a-f]{40})\b", IGNORECASE) @pytest.fixture(scope="session") -def short_hash_pattern() -> Pattern: +def short_hash_pattern() -> Pattern[str]: return regexp(r"\b([0-9a-f]{7})\b", IGNORECASE) @@ -149,18 +150,22 @@ def short_hash_pattern() -> Pattern: def get_sanitized_rst_changelog_content( changelog_rst_file: Path, default_rst_changelog_insertion_flag: str, - long_hash_pattern: Pattern, - short_hash_pattern: Pattern, + long_hash_pattern: Pattern[str], + short_hash_pattern: Pattern[str], ) -> GetSanitizedChangelogContentFn: rst_short_hash_link_pattern = regexp(r"(_[0-9a-f]{7})\b", IGNORECASE) def _get_sanitized_rst_changelog_content( repo_dir: Path, + changelog_file: Path = changelog_rst_file, remove_insertion_flag: bool = False, ) -> str: + if not (changelog_path := repo_dir / changelog_file).exists(): + return "" + # Note that our repo generation fixture includes the insertion flag automatically # toggle remove_insertion_flag to True to remove the insertion flag, applies to Init mode repos - with (repo_dir / changelog_rst_file).open(newline=os.linesep) as rfd: + with changelog_path.open(newline=os.linesep) as rfd: # use os.linesep here because the insertion flag is os-specific # but convert the content to universal newlines for comparison changelog_content = ( @@ -182,16 +187,20 @@ def _get_sanitized_rst_changelog_content( def get_sanitized_md_changelog_content( changelog_md_file: Path, default_md_changelog_insertion_flag: str, - long_hash_pattern: Pattern, 
- short_hash_pattern: Pattern, + long_hash_pattern: Pattern[str], + short_hash_pattern: Pattern[str], ) -> GetSanitizedChangelogContentFn: def _get_sanitized_md_changelog_content( repo_dir: Path, + changelog_file: Path = changelog_md_file, remove_insertion_flag: bool = False, ) -> str: + if not (changelog_path := repo_dir / changelog_file).exists(): + return "" + # Note that our repo generation fixture includes the insertion flag automatically # toggle remove_insertion_flag to True to remove the insertion flag, applies to Init mode repos - with (repo_dir / changelog_md_file).open(newline=os.linesep) as rfd: + with changelog_path.open(newline=os.linesep) as rfd: # use os.linesep here because the insertion flag is os-specific # but convert the content to universal newlines for comparison changelog_content = ( diff --git a/tests/e2e/test_main.py b/tests/e2e/test_main.py index 8ce3c58a5..45f13d28b 100644 --- a/tests/e2e/test_main.py +++ b/tests/e2e/test_main.py @@ -3,6 +3,7 @@ import json import subprocess from pathlib import Path +from shutil import rmtree from textwrap import dedent from typing import TYPE_CHECKING @@ -18,8 +19,6 @@ from tests.util import assert_exit_code, assert_successful_exit_code if TYPE_CHECKING: - from pathlib import Path - from tests.conftest import RunCliFn from tests.e2e.conftest import StripLoggingMessagesFn from tests.fixtures.example_project import ExProjectDir, UpdatePyprojectTomlFn @@ -245,7 +244,10 @@ def test_uses_default_config_when_no_config_file_found( # We have to initialise an empty git repository, as the example projects # all have pyproject.toml configs which would be used by default with git.Repo.init(example_project_dir) as repo: + rmtree(str(Path(repo.git_dir, "hooks"))) + repo.git.branch("-M", "main") + with repo.config_writer("repository") as config: config.set_value("user", "name", "semantic release testing") config.set_value("user", "email", "not_a_real@email.com") diff --git a/tests/fixtures/__init__.py 
b/tests/fixtures/__init__.py index d9e987f57..fcf471c9b 100644 --- a/tests/fixtures/__init__.py +++ b/tests/fixtures/__init__.py @@ -1,5 +1,6 @@ from tests.fixtures.commit_parsers import * from tests.fixtures.example_project import * from tests.fixtures.git_repo import * +from tests.fixtures.monorepos import * from tests.fixtures.repos import * from tests.fixtures.scipy import * diff --git a/tests/fixtures/example_project.py b/tests/fixtures/example_project.py index 5d0c60886..d496c4614 100644 --- a/tests/fixtures/example_project.py +++ b/tests/fixtures/example_project.py @@ -3,7 +3,8 @@ import os from pathlib import Path from textwrap import dedent -from typing import TYPE_CHECKING, Generator +from typing import TYPE_CHECKING, Generator, cast +from unittest import mock import pytest import tomlkit @@ -12,11 +13,20 @@ from importlib_resources import files import semantic_release +from semantic_release.cli.config import ( + GlobalCommandLineOptions, + RawConfig, + RuntimeContext, +) +from semantic_release.cli.util import load_raw_config_file from semantic_release.commit_parser import ( ConventionalCommitParser, EmojiCommitParser, ScipyCommitParser, ) +from semantic_release.commit_parser.conventional.parser_monorepo import ( + ConventionalCommitMonorepoParser, +) from semantic_release.hvcs import Bitbucket, Gitea, Github, Gitlab import tests.conftest @@ -37,7 +47,11 @@ if TYPE_CHECKING: from typing import Any, Protocol, Sequence + from tomlkit.container import Container as TOMLContainer + from semantic_release.commit_parser import CommitParser + from semantic_release.commit_parser._base import ParserOptions + from semantic_release.commit_parser.token import ParseResult from semantic_release.hvcs import HvcsBase from semantic_release.version.version import Version @@ -50,28 +64,65 @@ ExProjectDir = Path class GetWheelFileFn(Protocol): - def __call__(self, version_str: str) -> Path: ... + def __call__(self, version_str: str, pkg_name: str = ...) -> Path: ... 
class SetFlagFn(Protocol): - def __call__(self, flag: bool) -> None: ... + def __call__(self, flag: bool, toml_file: Path | str = ...) -> None: ... class UpdatePyprojectTomlFn(Protocol): - def __call__(self, setting: str, value: Any) -> None: ... + def __call__( + self, setting: str, value: Any, toml_file: Path | str = ... + ) -> None: ... class UseCustomParserFn(Protocol): - def __call__(self, module_import_str: str) -> None: ... + def __call__( + self, module_import_str: str, toml_file: Path | str = ... + ) -> None: ... class UseHvcsFn(Protocol): - def __call__(self, domain: str | None = None) -> type[HvcsBase]: ... + def __call__( + self, domain: str | None = None, toml_file: Path | str = ... + ) -> type[HvcsBase]: ... class UseParserFn(Protocol): - def __call__(self) -> type[CommitParser]: ... + def __call__( + self, toml_file: Path | str = ..., monorepo: bool = ... + ) -> type[CommitParser[ParseResult, ParserOptions]]: ... class UseReleaseNotesTemplateFn(Protocol): - def __call__(self) -> None: ... + def __call__(self, toml_file: Path | str = ...) -> None: ... class UpdateVersionPyFileFn(Protocol): - def __call__(self, version: Version | str) -> None: ... + def __call__( + self, version: Version | str, version_file: Path | str = ... + ) -> None: ... + + class GetHvcsFn(Protocol): + def __call__( + self, + hvcs_client_name: str, + origin_url: str = ..., + hvcs_domain: str | None = None, + ) -> Github | Gitlab | Gitea | Bitbucket: ... + + class ReadConfigFileFn(Protocol): + """Read the raw config file from `config_path`.""" + + def __call__(self, file: Path | str = ...) -> RawConfig: ... + + class LoadRuntimeContextFn(Protocol): + """Load the runtime context from the config file.""" + + def __call__( + self, cli_opts: GlobalCommandLineOptions | None = None + ) -> RuntimeContext: ... + + class GetParserFromConfigFileFn(Protocol): + """Get the commit parser from the config file.""" + + def __call__( + self, file: Path | str = ... 
+ ) -> CommitParser[ParseResult, ParserOptions]: ... @pytest.fixture(scope="session") @@ -268,12 +319,58 @@ def default_changelog_rst_template() -> Path: @pytest.fixture(scope="session") def get_wheel_file(dist_dir: Path) -> GetWheelFileFn: - def _get_wheel_file(version_str: str) -> Path: - return dist_dir / f"{EXAMPLE_PROJECT_NAME}-{version_str}-py3-none-any.whl" + def _get_wheel_file( + version_str: str, + pkg_name: str = EXAMPLE_PROJECT_NAME, + ) -> Path: + return dist_dir.joinpath( + f"{pkg_name.replace('-', '_')}-{version_str}-py3-none-any.whl" + ) return _get_wheel_file +@pytest.fixture(scope="session") +def read_config_file(pyproject_toml_file: Path) -> ReadConfigFileFn: + def _read_config_file(file: Path | str = pyproject_toml_file) -> RawConfig: + config_text = load_raw_config_file(file) + return RawConfig.model_validate(config_text) + + return _read_config_file + + +@pytest.fixture(scope="session") +def load_runtime_context( + read_config_file: ReadConfigFileFn, + pyproject_toml_file: Path, +) -> LoadRuntimeContextFn: + def _load_runtime_context( + cli_opts: GlobalCommandLineOptions | None = None, + ) -> RuntimeContext: + opts = cli_opts or GlobalCommandLineOptions( + config_file=str(pyproject_toml_file), + ) + raw_config = read_config_file(opts.config_file) + return RuntimeContext.from_raw_config(raw_config, opts) + + return _load_runtime_context + + +@pytest.fixture(scope="session") +def get_parser_from_config_file( + pyproject_toml_file: Path, + load_runtime_context: LoadRuntimeContextFn, +) -> GetParserFromConfigFileFn: + def _get_parser_from_config( + file: Path | str = pyproject_toml_file, + ) -> CommitParser[ParseResult, ParserOptions]: + return load_runtime_context( + cli_opts=GlobalCommandLineOptions(config_file=str(Path(file))) + ).commit_parser + + return _get_parser_from_config + + @pytest.fixture def example_project_dir(tmp_path: Path) -> ExProjectDir: return tmp_path.resolve() @@ -300,11 +397,15 @@ def use_release_notes_template( 
example_project_template_dir: Path, changelog_template_dir: Path, update_pyproject_toml: UpdatePyprojectTomlFn, + pyproject_toml_file: Path, ) -> UseReleaseNotesTemplateFn: - def _use_release_notes_template() -> None: + def _use_release_notes_template( + toml_file: Path | str = pyproject_toml_file, + ) -> None: update_pyproject_toml( "tool.semantic_release.changelog.template_dir", str(changelog_template_dir), + toml_file=toml_file, ) example_project_template_dir.mkdir(parents=True, exist_ok=True) release_notes_j2 = example_project_template_dir / ".release_notes.md.j2" @@ -381,8 +482,10 @@ def example_project_template_dir( @pytest.fixture(scope="session") def update_version_py_file(version_py_file: Path) -> UpdateVersionPyFileFn: - def _update_version_py_file(version: Version | str) -> None: - cwd_version_py = version_py_file.resolve() + def _update_version_py_file( + version: Version | str, version_file: Path | str = version_py_file + ) -> None: + cwd_version_py = Path(version_file).resolve() cwd_version_py.parent.mkdir(parents=True, exist_ok=True) cwd_version_py.write_text( dedent( @@ -399,8 +502,10 @@ def _update_version_py_file(version: Version | str) -> None: def update_pyproject_toml(pyproject_toml_file: Path) -> UpdatePyprojectTomlFn: """Update the pyproject.toml file with the given content.""" - def _update_pyproject_toml(setting: str, value: Any) -> None: - cwd_pyproject_toml = pyproject_toml_file.resolve() + def _update_pyproject_toml( + setting: str, value: Any, toml_file: Path | str = pyproject_toml_file + ) -> None: + cwd_pyproject_toml = Path(toml_file).resolve() with open(cwd_pyproject_toml) as rfd: pyproject_toml = tomlkit.load(rfd) @@ -409,11 +514,13 @@ def _update_pyproject_toml(setting: str, value: Any) -> None: new_setting_key = parts.pop(-1) new_setting[new_setting_key] = value - pointer = pyproject_toml + pointer: TOMLContainer = pyproject_toml for part in parts: - if pointer.get(part, None) is None: - pointer.add(part, tomlkit.table()) - 
pointer = pointer.get(part, {}) + if (next_pointer := pointer.get(part, None)) is None: + next_pointer = tomlkit.table() + pointer.add(part, next_pointer) + + pointer = cast("TOMLContainer", next_pointer) if value is None: pointer.pop(new_setting_key) @@ -432,127 +539,272 @@ def pyproject_toml_config_option_parser() -> str: @pytest.fixture(scope="session") -def set_major_on_zero(update_pyproject_toml: UpdatePyprojectTomlFn) -> SetFlagFn: +def pyproject_toml_config_option_remote_type() -> str: + return f"tool.{semantic_release.__name__}.remote.type" + + +@pytest.fixture(scope="session") +def pyproject_toml_config_option_remote_domain() -> str: + return f"tool.{semantic_release.__name__}.remote.domain" + + +@pytest.fixture(scope="session") +def set_major_on_zero( + pyproject_toml_file: Path, update_pyproject_toml: UpdatePyprojectTomlFn +) -> SetFlagFn: """Turn on/off the major_on_zero setting.""" - def _set_major_on_zero(flag: bool) -> None: - update_pyproject_toml("tool.semantic_release.major_on_zero", flag) + def _set_major_on_zero( + flag: bool, toml_file: Path | str = pyproject_toml_file + ) -> None: + update_pyproject_toml("tool.semantic_release.major_on_zero", flag, toml_file) return _set_major_on_zero @pytest.fixture(scope="session") -def set_allow_zero_version(update_pyproject_toml: UpdatePyprojectTomlFn) -> SetFlagFn: +def set_allow_zero_version( + pyproject_toml_file: Path, update_pyproject_toml: UpdatePyprojectTomlFn +) -> SetFlagFn: """Turn on/off the allow_zero_version setting.""" - def _set_allow_zero_version(flag: bool) -> None: - update_pyproject_toml("tool.semantic_release.allow_zero_version", flag) + def _set_allow_zero_version( + flag: bool, toml_file: Path | str = pyproject_toml_file + ) -> None: + update_pyproject_toml( + "tool.semantic_release.allow_zero_version", flag, toml_file + ) return _set_allow_zero_version @pytest.fixture(scope="session") def use_conventional_parser( + pyproject_toml_file: Path, update_pyproject_toml: 
UpdatePyprojectTomlFn, pyproject_toml_config_option_parser: str, ) -> UseParserFn: """Modify the configuration file to use the Conventional parser.""" - def _use_conventional_parser() -> type[CommitParser]: - update_pyproject_toml(pyproject_toml_config_option_parser, "conventional") - return ConventionalCommitParser + def _use_conventional_parser( + toml_file: Path | str = pyproject_toml_file, monorepo: bool = False + ) -> type[CommitParser[ParseResult, ParserOptions]]: + update_pyproject_toml( + pyproject_toml_config_option_parser, + f"conventional{'-monorepo' if monorepo else ''}", + toml_file=toml_file, + ) + return cast( + "type[CommitParser[ParseResult, ParserOptions]]", + ConventionalCommitMonorepoParser if monorepo else ConventionalCommitParser, + ) return _use_conventional_parser @pytest.fixture(scope="session") def use_emoji_parser( + pyproject_toml_file: Path, update_pyproject_toml: UpdatePyprojectTomlFn, pyproject_toml_config_option_parser: str, ) -> UseParserFn: """Modify the configuration file to use the Emoji parser.""" - def _use_emoji_parser() -> type[CommitParser]: - update_pyproject_toml(pyproject_toml_config_option_parser, "emoji") - return EmojiCommitParser + def _use_emoji_parser( + toml_file: Path | str = pyproject_toml_file, monorepo: bool = False + ) -> type[CommitParser[ParseResult, ParserOptions]]: + if monorepo: + raise ValueError( + "The Emoji parser does not support monorepo mode. " + "Use the conventional parser instead." 
+ ) + + update_pyproject_toml( + pyproject_toml_config_option_parser, "emoji", toml_file=toml_file + ) + return cast("type[CommitParser[ParseResult, ParserOptions]]", EmojiCommitParser) return _use_emoji_parser @pytest.fixture(scope="session") def use_scipy_parser( + pyproject_toml_file: Path, update_pyproject_toml: UpdatePyprojectTomlFn, pyproject_toml_config_option_parser: str, ) -> UseParserFn: """Modify the configuration file to use the Scipy parser.""" - def _use_scipy_parser() -> type[CommitParser]: - update_pyproject_toml(pyproject_toml_config_option_parser, "scipy") - return ScipyCommitParser + def _use_scipy_parser( + toml_file: Path | str = pyproject_toml_file, monorepo: bool = False + ) -> type[CommitParser[ParseResult, ParserOptions]]: + if monorepo: + raise ValueError( + "The Scipy parser does not support monorepo mode. " + "Use the conventional parser instead." + ) + + update_pyproject_toml( + pyproject_toml_config_option_parser, "scipy", toml_file=toml_file + ) + return cast("type[CommitParser[ParseResult, ParserOptions]]", ScipyCommitParser) return _use_scipy_parser @pytest.fixture(scope="session") def use_custom_parser( + pyproject_toml_file: Path, update_pyproject_toml: UpdatePyprojectTomlFn, pyproject_toml_config_option_parser: str, ) -> UseCustomParserFn: """Modify the configuration file to use a user defined string parser.""" - def _use_custom_parser(module_import_str: str) -> None: - update_pyproject_toml(pyproject_toml_config_option_parser, module_import_str) + def _use_custom_parser( + module_import_str: str, toml_file: Path | str = pyproject_toml_file + ) -> None: + update_pyproject_toml( + pyproject_toml_config_option_parser, module_import_str, toml_file=toml_file + ) return _use_custom_parser @pytest.fixture(scope="session") -def use_github_hvcs(update_pyproject_toml: UpdatePyprojectTomlFn) -> UseHvcsFn: +def get_hvcs(example_git_https_url: str) -> GetHvcsFn: + hvcs_clients: dict[str, type[HvcsBase]] = { + "github": Github, + "gitlab": 
Gitlab, + "gitea": Gitea, + "bitbucket": Bitbucket, + } + + def _get_hvcs( + hvcs_client_name: str, + origin_url: str = example_git_https_url, + hvcs_domain: str | None = None, + ) -> Github | Gitlab | Gitea | Bitbucket: + if (hvcs_class := hvcs_clients.get(hvcs_client_name)) is None: + raise ValueError(f"Unknown HVCS client name: {hvcs_client_name}") + + # Create HVCS Client instance + with mock.patch.dict(os.environ, {}, clear=True): + hvcs = hvcs_class(origin_url, hvcs_domain=hvcs_domain) + assert hvcs.repo_name # Force the HVCS client to cache the repo name + assert hvcs.owner # Force the HVCS client to cache the owner + + return cast("Github | Gitlab | Gitea | Bitbucket", hvcs) + + return _get_hvcs + + +@pytest.fixture(scope="session") +def use_github_hvcs( + pyproject_toml_file: Path, + update_pyproject_toml: UpdatePyprojectTomlFn, + pyproject_toml_config_option_remote_type: str, + pyproject_toml_config_option_remote_domain: str, +) -> UseHvcsFn: """Modify the configuration file to use GitHub as the HVCS.""" - def _use_github_hvcs(domain: str | None = None) -> type[HvcsBase]: - update_pyproject_toml("tool.semantic_release.remote.type", "github") + def _use_github_hvcs( + domain: str | None = None, toml_file: Path | str = pyproject_toml_file + ) -> type[HvcsBase]: + update_pyproject_toml( + pyproject_toml_config_option_remote_type, + Github.__name__.lower(), + toml_file=toml_file, + ) + if domain is not None: - update_pyproject_toml("tool.semantic_release.remote.domain", domain) + update_pyproject_toml( + pyproject_toml_config_option_remote_domain, domain, toml_file=toml_file + ) + return Github return _use_github_hvcs @pytest.fixture(scope="session") -def use_gitlab_hvcs(update_pyproject_toml: UpdatePyprojectTomlFn) -> UseHvcsFn: +def use_gitlab_hvcs( + pyproject_toml_file: Path, + update_pyproject_toml: UpdatePyprojectTomlFn, + pyproject_toml_config_option_remote_type: str, + pyproject_toml_config_option_remote_domain: str, +) -> UseHvcsFn: """Modify the 
configuration file to use GitLab as the HVCS.""" - def _use_gitlab_hvcs(domain: str | None = None) -> type[HvcsBase]: - update_pyproject_toml("tool.semantic_release.remote.type", "gitlab") + def _use_gitlab_hvcs( + domain: str | None = None, toml_file: Path | str = pyproject_toml_file + ) -> type[HvcsBase]: + update_pyproject_toml( + pyproject_toml_config_option_remote_type, + Gitlab.__name__.lower(), + toml_file=toml_file, + ) + if domain is not None: - update_pyproject_toml("tool.semantic_release.remote.domain", domain) + update_pyproject_toml( + pyproject_toml_config_option_remote_domain, domain, toml_file=toml_file + ) + return Gitlab return _use_gitlab_hvcs @pytest.fixture(scope="session") -def use_gitea_hvcs(update_pyproject_toml: UpdatePyprojectTomlFn) -> UseHvcsFn: +def use_gitea_hvcs( + pyproject_toml_file: Path, + update_pyproject_toml: UpdatePyprojectTomlFn, + pyproject_toml_config_option_remote_type: str, + pyproject_toml_config_option_remote_domain: str, +) -> UseHvcsFn: """Modify the configuration file to use Gitea as the HVCS.""" - def _use_gitea_hvcs(domain: str | None = None) -> type[HvcsBase]: - update_pyproject_toml("tool.semantic_release.remote.type", "gitea") + def _use_gitea_hvcs( + domain: str | None = None, toml_file: Path | str = pyproject_toml_file + ) -> type[HvcsBase]: + update_pyproject_toml( + pyproject_toml_config_option_remote_type, + Gitea.__name__.lower(), + toml_file=toml_file, + ) + if domain is not None: - update_pyproject_toml("tool.semantic_release.remote.domain", domain) + update_pyproject_toml( + pyproject_toml_config_option_remote_domain, domain, toml_file=toml_file + ) + return Gitea return _use_gitea_hvcs @pytest.fixture(scope="session") -def use_bitbucket_hvcs(update_pyproject_toml: UpdatePyprojectTomlFn) -> UseHvcsFn: +def use_bitbucket_hvcs( + pyproject_toml_file: Path, + update_pyproject_toml: UpdatePyprojectTomlFn, + pyproject_toml_config_option_remote_type: str, + pyproject_toml_config_option_remote_domain: str, +) 
-> UseHvcsFn: """Modify the configuration file to use BitBucket as the HVCS.""" - def _use_bitbucket_hvcs(domain: str | None = None) -> type[HvcsBase]: - update_pyproject_toml("tool.semantic_release.remote.type", "bitbucket") + def _use_bitbucket_hvcs( + domain: str | None = None, toml_file: Path | str = pyproject_toml_file + ) -> type[HvcsBase]: + update_pyproject_toml( + pyproject_toml_config_option_remote_type, + Bitbucket.__name__.lower(), + toml_file=toml_file, + ) + if domain is not None: - update_pyproject_toml("tool.semantic_release.remote.domain", domain) + update_pyproject_toml( + pyproject_toml_config_option_remote_domain, domain, toml_file=toml_file + ) + return Bitbucket return _use_bitbucket_hvcs diff --git a/tests/fixtures/git_repo.py b/tests/fixtures/git_repo.py index 7ad6ac0be..e19558a1b 100644 --- a/tests/fixtures/git_repo.py +++ b/tests/fixtures/git_repo.py @@ -5,22 +5,18 @@ from copy import deepcopy from datetime import datetime, timedelta from functools import reduce +from itertools import count from pathlib import Path +from shutil import rmtree from textwrap import dedent from time import sleep -from typing import TYPE_CHECKING, cast +from typing import TYPE_CHECKING, TypeVar, cast from unittest import mock import pytest from git import Actor, Repo from semantic_release.cli.config import ChangelogOutputFormat -from semantic_release.commit_parser.conventional import ( - ConventionalCommitParser, - ConventionalCommitParserOptions, -) -from semantic_release.commit_parser.emoji import EmojiCommitParser, EmojiParserOptions -from semantic_release.commit_parser.scipy import ScipyCommitParser, ScipyParserOptions from semantic_release.hvcs.bitbucket import Bitbucket from semantic_release.hvcs.gitea import Gitea from semantic_release.hvcs.github import Github @@ -35,6 +31,7 @@ DEFAULT_BRANCH_NAME, DEFAULT_MERGE_STRATEGY_OPTION, EXAMPLE_HVCS_DOMAIN, + EXAMPLE_PROJECT_NAME, EXAMPLE_REPO_NAME, EXAMPLE_REPO_OWNER, NULL_HEX_SHA, @@ -49,7 +46,23 @@ if 
TYPE_CHECKING: from typing import Any, Generator, Literal, Protocol, Sequence, TypedDict, Union - from tests.fixtures.example_project import UpdateVersionPyFileFn + from semantic_release.commit_parser._base import CommitParser, ParserOptions + from semantic_release.commit_parser.conventional import ( + ConventionalCommitParser, + ) + from semantic_release.commit_parser.conventional.parser_monorepo import ( + ConventionalCommitMonorepoParser, + ) + from semantic_release.commit_parser.emoji import EmojiCommitParser + from semantic_release.commit_parser.scipy import ScipyCommitParser + from semantic_release.commit_parser.token import ParsedMessageResult, ParseResult + + from tests.fixtures.example_project import ( + GetHvcsFn, + GetParserFromConfigFileFn, + UpdateVersionPyFileFn, + ) + from tests.fixtures.monorepos.git_monorepo import BuildMonorepoFn try: # Python 3.8 and 3.9 compatibility @@ -80,7 +93,9 @@ CommitMsg = str DatetimeISOStr = str ChangelogTypeHeading = str - TomlSerializableTypes = Union[dict, set, list, tuple, int, float, bool, str] + TomlSerializableTypes = Union[ + dict[Any, Any], set[Any], list[Any], tuple[Any, ...], int, float, bool, str + ] class RepoVersionDef(TypedDict): """ @@ -101,6 +116,7 @@ class ChangelogTypeHeadingDef(TypedDict): """List of indexes values to match to the commits list in the RepoVersionDef""" class CommitDef(TypedDict): + cid: str msg: CommitMsg type: str category: str @@ -128,6 +144,8 @@ def __call__( tag_format_str: str | None = None, extra_configs: dict[str, TomlSerializableTypes] | None = None, mask_initial_release: bool = True, # Default as of v10 + package_name: str = ..., + monorepo: bool = False, ) -> tuple[Path, HvcsBase]: ... class CommitNReturnChangelogEntryFn(Protocol): @@ -145,6 +163,8 @@ def __call__( version: str, tag_format: str = ..., timestamp: DatetimeISOStr | None = None, + version_py_file: Path | str = ..., + commit_message_format: str = ..., ) -> None: ... 
class ExProjectGitRepoFn(Protocol): @@ -157,8 +177,10 @@ def __call__( commit_type: CommitConvention, ) -> RepoDefinition: ... - class GetCommitDefFn(Protocol): - def __call__(self, msg: str) -> CommitDef: ... + T_contra = TypeVar("T_contra", contravariant=True) + + class GetCommitDefFn(Protocol[T_contra]): + def __call__(self, msg: str, parser: T_contra) -> CommitDef: ... class GetVersionStringsFn(Protocol): def __call__(self) -> list[VersionStr]: ... @@ -183,7 +205,7 @@ def __call__( repo_definition: RepoDefinition, hvcs: Github | Gitlab | Gitea | Bitbucket, dest_file: Path | None = None, - max_version: str | None = None, + max_version: Version | None = None, output_format: ChangelogOutputFormat = ChangelogOutputFormat.MARKDOWN, mask_initial_release: bool = True, # Default as of v10 ) -> str: ... @@ -247,6 +269,7 @@ def __call__( ) -> CommitDef: ... class CommitSpec(TypedDict): + cid: str conventional: str emoji: str scipy: str @@ -269,6 +292,34 @@ class RepoActionConfigureDetails(DetailsBase): mask_initial_release: bool extra_configs: dict[str, TomlSerializableTypes] + class RepoActionConfigureMonorepo(TypedDict): + action: Literal[RepoActionStep.CONFIGURE_MONOREPO] + details: RepoActionConfigureMonorepoDetails + + class RepoActionConfigureMonorepoDetails(DetailsBase): + package_dir: Path | str + package_name: str + tag_format_str: str | None + mask_initial_release: bool + extra_configs: dict[str, TomlSerializableTypes] + + class RepoActionCreateMonorepo(TypedDict): + action: Literal[RepoActionStep.CREATE_MONOREPO] + details: RepoActionCreateMonorepoDetails + + class RepoActionCreateMonorepoDetails(DetailsBase): + commit_type: CommitConvention + hvcs_client_name: str + hvcs_domain: str + origin_url: NotRequired[str] + + class RepoActionChangeDirectory(TypedDict): + action: Literal[RepoActionStep.CHANGE_DIRECTORY] + details: RepoActionChangeDirectoryDetails + + class RepoActionChangeDirectoryDetails(DetailsBase): + directory: Path | str + class 
RepoActionMakeCommits(TypedDict): action: Literal[RepoActionStep.MAKE_COMMITS] details: RepoActionMakeCommitsDetails @@ -281,8 +332,11 @@ class RepoActionRelease(TypedDict): details: RepoActionReleaseDetails class RepoActionReleaseDetails(DetailsBase): - version: str + commit_message_format: NotRequired[str] datetime: DatetimeISOStr + tag_format: NotRequired[str] + version: str + version_py_file: NotRequired[Path | str] class RepoActionGitCheckout(TypedDict): action: Literal[RepoActionStep.GIT_CHECKOUT] @@ -304,6 +358,7 @@ class RepoActionGitSquashDetails(DetailsBase): branch: str strategy_option: str commit_def: CommitDef + config_file: Path | str class RepoActionGitMerge(TypedDict): action: Literal[RepoActionStep.GIT_MERGE] @@ -324,17 +379,23 @@ class RepoActionWriteChangelogs(TypedDict): details: RepoActionWriteChangelogsDetails class RepoActionWriteChangelogsDetails(DetailsBase): - new_version: str - max_version: NotRequired[str] + new_version: Version + max_version: NotRequired[Version] dest_files: Sequence[RepoActionWriteChangelogsDestFile] + commit_ids: Sequence[str] class RepoActionWriteChangelogsDestFile(TypedDict): path: Path | str format: ChangelogOutputFormat + mask_initial_release: bool class ConvertCommitSpecToCommitDefFn(Protocol): def __call__( - self, commit_spec: CommitSpec, commit_type: CommitConvention + self, + commit_spec: CommitSpec, + commit_type: CommitConvention, + parser: CommitParser[ParseResult, ParserOptions], + monorepo: bool = ..., ) -> CommitDef: ... class GetRepoDefinitionFn(Protocol): @@ -361,11 +422,15 @@ class BuiltRepoResult(TypedDict): repo: Repo class GetVersionsFromRepoBuildDefFn(Protocol): - def __call__(self, repo_def: Sequence[RepoActions]) -> Sequence[str]: ... + def __call__(self, repo_def: Sequence[RepoActions]) -> Sequence[Version]: ... 
class ConvertCommitSpecsToCommitDefsFn(Protocol): def __call__( - self, commits: Sequence[CommitSpec], commit_type: CommitConvention + self, + commits: Sequence[CommitSpec], + commit_type: CommitConvention, + parser: CommitParser[ParseResult, ParserOptions], + monorepo: bool = ..., ) -> Sequence[CommitDef]: ... class BuildSpecificRepoFn(Protocol): @@ -374,13 +439,16 @@ def __call__( ) -> Sequence[RepoActions]: ... RepoActions: TypeAlias = Union[ + RepoActionChangeDirectory, RepoActionConfigure, - RepoActionMakeCommits, - RepoActionRelease, + RepoActionConfigureMonorepo, + RepoActionCreateMonorepo, RepoActionGitCheckout, + RepoActionGitMerge, RepoActionGitSquash, + RepoActionMakeCommits, + RepoActionRelease, RepoActionWriteChangelogs, - RepoActionGitMerge, ] class GetGitRepo4DirFn(Protocol): @@ -388,16 +456,24 @@ def __call__(self, directory: Path | str) -> Repo: ... class SplitRepoActionsByReleaseTagsFn(Protocol): def __call__( - self, repo_definition: Sequence[RepoActions], tag_format_str: str - ) -> dict[str, list[RepoActions]]: ... + self, + repo_definition: Sequence[RepoActions], + ) -> dict[Version | Literal["Unreleased"] | None, list[RepoActions]]: ... class GetCfgValueFromDefFn(Protocol): def __call__( self, build_definition: Sequence[RepoActions], key: str ) -> Any: ... + class SquashedCommitSupportedParser(Protocol): + def unsquash_commit_message(self, message: str) -> list[str]: ... + + def parse_message(self, message: str) -> ParsedMessageResult | None: ... + class SeparateSquashedCommitDefFn(Protocol): - def __call__(self, squashed_commit_def: CommitDef) -> list[CommitDef]: ... + def __call__( + self, squashed_commit_def: CommitDef, parser: SquashedCommitSupportedParser + ) -> list[CommitDef]: ... 
class GenerateDefaultReleaseNotesFromDefFn(Protocol): def __call__( @@ -469,6 +545,7 @@ def _build_repo(cached_repo_path: Path) -> Sequence[RepoActions]: # NOTE: We don't want to hold the repo object open for the entire test session, # the implementation on Windows holds some file descriptors open until close is called. with Repo.init(cached_repo_path) as repo: + rmtree(str(Path(repo.git_dir, "hooks"))) # Without this the global config may set it to "master", we want consistency repo.git.branch("-M", DEFAULT_BRANCH_NAME) with repo.config_writer("repository") as config: @@ -520,12 +597,11 @@ def example_git_https_url(): @pytest.fixture(scope="session") -def get_commit_def_of_conventional_commit( - default_conventional_parser: ConventionalCommitParser, -) -> GetCommitDefFn: - def _get_commit_def_of_conventional_commit(msg: str) -> CommitDef: - if not (parsed_result := default_conventional_parser.parse_message(msg)): +def get_commit_def_of_conventional_commit() -> GetCommitDefFn[ConventionalCommitParser]: + def _get_commit_def(msg: str, parser: ConventionalCommitParser) -> CommitDef: + if not (parsed_result := parser.parse_message(msg)): return { + "cid": "", "msg": msg, "type": "unknown", "category": "Unknown", @@ -538,6 +614,7 @@ def _get_commit_def_of_conventional_commit(msg: str) -> CommitDef: } return { + "cid": "", "msg": msg, "type": parsed_result.type, "category": parsed_result.category, @@ -549,16 +626,54 @@ def _get_commit_def_of_conventional_commit(msg: str) -> CommitDef: "include_in_changelog": True, } - return _get_commit_def_of_conventional_commit + return _get_commit_def @pytest.fixture(scope="session") -def get_commit_def_of_emoji_commit( - default_emoji_parser: EmojiCommitParser, -) -> GetCommitDefFn: - def _get_commit_def_of_emoji_commit(msg: str) -> CommitDef: - if not (parsed_result := default_emoji_parser.parse_message(msg)): +def get_commit_def_of_conventional_commit_monorepo() -> ( + GetCommitDefFn[ConventionalCommitMonorepoParser] +): + def 
_get_commit_def( + msg: str, parser: ConventionalCommitMonorepoParser + ) -> CommitDef: + if not (parsed_result := parser.parse_message(msg)): return { + "cid": "", + "msg": msg, + "type": "unknown", + "category": "Unknown", + "desc": msg, + "brking_desc": "", + "scope": "", + "mr": "", + "sha": NULL_HEX_SHA, + "include_in_changelog": False, + } + + return { + "cid": "", + "msg": msg, + "type": parsed_result.type, + "category": parsed_result.category, + "desc": str.join("\n\n", parsed_result.descriptions), + "brking_desc": str.join("\n\n", parsed_result.breaking_descriptions), + "scope": parsed_result.scope, + "mr": parsed_result.linked_merge_request, + "sha": NULL_HEX_SHA, + "include_in_changelog": True, + } + + return _get_commit_def + + +@pytest.fixture(scope="session") +def get_commit_def_of_emoji_commit() -> GetCommitDefFn[EmojiCommitParser]: + def _get_commit_def_of_emoji_commit( + msg: str, parser: EmojiCommitParser + ) -> CommitDef: + if not (parsed_result := parser.parse_message(msg)): + return { + "cid": "", "msg": msg, "type": "unknown", "category": "Other", @@ -571,6 +686,7 @@ def _get_commit_def_of_emoji_commit(msg: str) -> CommitDef: } return { + "cid": "", "msg": msg, "type": parsed_result.type, "category": parsed_result.category, @@ -586,12 +702,13 @@ def _get_commit_def_of_emoji_commit(msg: str) -> CommitDef: @pytest.fixture(scope="session") -def get_commit_def_of_scipy_commit( - default_scipy_parser: ScipyCommitParser, -) -> GetCommitDefFn: - def _get_commit_def_of_scipy_commit(msg: str) -> CommitDef: - if not (parsed_result := default_scipy_parser.parse_message(msg)): +def get_commit_def_of_scipy_commit() -> GetCommitDefFn[ScipyCommitParser]: + def _get_commit_def_of_scipy_commit( + msg: str, parser: ScipyCommitParser + ) -> CommitDef: + if not (parsed_result := parser.parse_message(msg)): return { + "cid": "", "msg": msg, "type": "unknown", "category": "Unknown", @@ -604,6 +721,7 @@ def _get_commit_def_of_scipy_commit(msg: str) -> CommitDef: } 
return { + "cid": "", "msg": msg, "type": parsed_result.type, "category": parsed_result.category, @@ -718,13 +836,18 @@ def _format_squash_commit_msg_github( sq_cmts: list[str] = ( squashed_commits # type: ignore[assignment] if len(squashed_commits) > 1 and not isinstance(squashed_commits[0], dict) - else [commit["msg"] for commit in squashed_commits] # type: ignore[index] + else [ + commit.get("msg") if isinstance(commit, dict) else commit + for commit in squashed_commits + ] ) + pr_title_parts = pr_title.strip().split("\n\n", maxsplit=1) return ( str.join( "\n\n", [ - f"{pr_title} (#{pr_number})", + f"{pr_title_parts[0]} (#{pr_number})", + *pr_title_parts[1:], *[f"* {commit_str}" for commit_str in sq_cmts], ], ) @@ -855,6 +978,8 @@ def _mimic_semantic_release_commit( version: str, tag_format: str = default_tag_format_str, timestamp: DatetimeISOStr | None = None, + version_py_file: Path | str = "", + commit_message_format: str = COMMIT_MESSAGE, ) -> None: curr_dt = stable_now_date() commit_dt = ( @@ -865,7 +990,14 @@ def _mimic_semantic_release_commit( sleep(1) # ensure commit timestamps are unique # stamp version into version file - update_version_py_file(version) + update_version_py_file( + **dict( + filter( + lambda k_v: k_v[1], + {"version": version, "version_file": version_py_file}.items(), + ) + ) + ) # stamp version into pyproject.toml update_pyproject_toml("tool.poetry.version", version) @@ -873,7 +1005,7 @@ def _mimic_semantic_release_commit( # commit --all files with version number commit message git_repo.git.commit( a=True, - m=COMMIT_MESSAGE.format(version=version), + m=commit_message_format.format(version=version), date=commit_dt.isoformat(timespec="seconds"), ) @@ -932,10 +1064,13 @@ def simulate_change_commits_n_rtn_changelog_entry( def _simulate_change_commits_n_rtn_changelog_entry( git_repo: Repo, commit_msgs: Sequence[CommitDef] ) -> Sequence[CommitDef]: - changelog_entries = [] + changelog_entries: list[CommitDef] = [] for commit_msg in 
commit_msgs: - add_text_to_file(git_repo, file_in_repo) + if not git_repo.is_dirty(index=True, working_tree=False): + add_text_to_file(git_repo, file_in_repo) + changelog_entries.append(commit_n_rtn_changelog_entry(git_repo, commit_msg)) + return changelog_entries return _simulate_change_commits_n_rtn_changelog_entry @@ -970,6 +1105,7 @@ def _get_hvcs_client_from_repo_def( ) # Force the HVCS client to attempt to resolve the repo name (as we generally cache it) assert hvcs_client.repo_name + assert hvcs_client.owner return cast("Github | Gitlab | Gitea | Bitbucket", hvcs_client) return _get_hvcs_client_from_repo_def @@ -978,6 +1114,49 @@ def _get_hvcs_client_from_repo_def( @pytest.fixture(scope="session") def build_configured_base_repo( # noqa: C901 cached_example_git_project: Path, + configure_base_repo: BuildRepoFn, +) -> BuildRepoFn: + """ + This fixture is intended to simplify repo scenario building by initially + creating the repo but also configuring semantic_release in the pyproject.toml + for when the test executes semantic_release. It returns a function so that + derivative fixtures can call this fixture with individual parameters. 
+ """ + + def _build_configured_base_repo( # noqa: C901 + dest_dir: Path | str, + commit_type: CommitConvention = "conventional", + hvcs_client_name: str = "github", + hvcs_domain: str = EXAMPLE_HVCS_DOMAIN, + tag_format_str: str | None = None, + extra_configs: dict[str, TomlSerializableTypes] | None = None, + mask_initial_release: bool = True, # Default as of v10 + package_name: str = EXAMPLE_PROJECT_NAME, + monorepo: bool = False, + ) -> tuple[Path, HvcsBase]: + if not cached_example_git_project.exists(): + raise RuntimeError("Unable to find cached git project files!") + + # Copy the cached git project the dest directory + copy_dir_tree(cached_example_git_project, dest_dir) + + return configure_base_repo( + dest_dir=dest_dir, + commit_type=commit_type, + hvcs_client_name=hvcs_client_name, + hvcs_domain=hvcs_domain, + tag_format_str=tag_format_str, + extra_configs=extra_configs, + mask_initial_release=mask_initial_release, + package_name=package_name, + monorepo=monorepo, + ) + + return _build_configured_base_repo + + +@pytest.fixture(scope="session") +def configure_base_repo( # noqa: C901 use_github_hvcs: UseHvcsFn, use_gitlab_hvcs: UseHvcsFn, use_gitea_hvcs: UseHvcsFn, @@ -989,6 +1168,8 @@ def build_configured_base_repo( # noqa: C901 example_git_https_url: str, update_pyproject_toml: UpdatePyprojectTomlFn, get_wheel_file: GetWheelFileFn, + pyproject_toml_file: Path, + get_hvcs: GetHvcsFn, ) -> BuildRepoFn: """ This fixture is intended to simplify repo scenario building by initially @@ -997,7 +1178,7 @@ def build_configured_base_repo( # noqa: C901 derivative fixtures can call this fixture with individual parameters. 
""" - def _build_configured_base_repo( # noqa: C901 + def _configure_base_repo( # noqa: C901 dest_dir: Path | str, commit_type: str = "conventional", hvcs_client_name: str = "github", @@ -1005,53 +1186,55 @@ def _build_configured_base_repo( # noqa: C901 tag_format_str: str | None = None, extra_configs: dict[str, TomlSerializableTypes] | None = None, mask_initial_release: bool = True, # Default as of v10 + package_name: str = EXAMPLE_PROJECT_NAME, + monorepo: bool = False, ) -> tuple[Path, HvcsBase]: - if not cached_example_git_project.exists(): - raise RuntimeError("Unable to find cached git project files!") - - # Copy the cached git project the dest directory - copy_dir_tree(cached_example_git_project, dest_dir) - # Make sure we are in the dest directory with temporary_working_directory(dest_dir): # Set parser configuration if commit_type == "conventional": - use_conventional_parser() + use_conventional_parser( + toml_file=pyproject_toml_file, monorepo=monorepo + ) elif commit_type == "emoji": - use_emoji_parser() + use_emoji_parser(toml_file=pyproject_toml_file, monorepo=monorepo) elif commit_type == "scipy": - use_scipy_parser() + use_scipy_parser(toml_file=pyproject_toml_file, monorepo=monorepo) else: - use_custom_parser(commit_type) + use_custom_parser(commit_type, toml_file=pyproject_toml_file) # Set HVCS configuration if hvcs_client_name == "github": - hvcs_class = use_github_hvcs(hvcs_domain) + use_github_hvcs(hvcs_domain, toml_file=pyproject_toml_file) elif hvcs_client_name == "gitlab": - hvcs_class = use_gitlab_hvcs(hvcs_domain) + use_gitlab_hvcs(hvcs_domain, toml_file=pyproject_toml_file) elif hvcs_client_name == "gitea": - hvcs_class = use_gitea_hvcs(hvcs_domain) + use_gitea_hvcs(hvcs_domain, toml_file=pyproject_toml_file) elif hvcs_client_name == "bitbucket": - hvcs_class = use_bitbucket_hvcs(hvcs_domain) + use_bitbucket_hvcs(hvcs_domain, toml_file=pyproject_toml_file) else: raise ValueError(f"Unknown HVCS client name: {hvcs_client_name}") # Create 
HVCS Client instance - with mock.patch.dict(os.environ, {}, clear=True): - hvcs = hvcs_class(example_git_https_url, hvcs_domain=hvcs_domain) - assert hvcs.repo_name # Force the HVCS client to cache the repo name + hvcs = get_hvcs( + hvcs_client_name=hvcs_client_name, + origin_url=example_git_https_url, + hvcs_domain=hvcs_domain, + ) # Set tag format in configuration if tag_format_str is not None: update_pyproject_toml( - "tool.semantic_release.tag_format", tag_format_str + "tool.semantic_release.tag_format", + tag_format_str, + toml_file=pyproject_toml_file, ) # Set the build_command to create a wheel file (using the build_command_env version variable) build_result_file = ( - get_wheel_file("$NEW_VERSION") + get_wheel_file("$NEW_VERSION", pkg_name=package_name) if sys.platform != "win32" - else get_wheel_file("$Env:NEW_VERSION") + else get_wheel_file("$Env:NEW_VERSION", pkg_name=package_name) ) update_pyproject_toml( # NOTE: must work in both bash and Powershell @@ -1071,69 +1254,65 @@ def _build_configured_base_repo( # noqa: C901 New-Item -ItemType file -Path "$WHEEL_FILE" -Force | Select-Object OriginalPath """ ), + toml_file=pyproject_toml_file, ) # Set whether or not the initial release should be masked update_pyproject_toml( "tool.semantic_release.changelog.default_templates.mask_initial_release", mask_initial_release, + toml_file=pyproject_toml_file, ) # Apply configurations to pyproject.toml if extra_configs is not None: for key, value in extra_configs.items(): - update_pyproject_toml(key, value) + update_pyproject_toml(key, value, toml_file=pyproject_toml_file) return Path(dest_dir), hvcs - return _build_configured_base_repo + return _configure_base_repo @pytest.fixture(scope="session") -def separate_squashed_commit_def( - default_conventional_parser: ConventionalCommitParser, - default_emoji_parser: EmojiCommitParser, - default_scipy_parser: ScipyCommitParser, -) -> SeparateSquashedCommitDefFn: - message_parsers: dict[ - CommitConvention, - 
ConventionalCommitParser | EmojiCommitParser | ScipyCommitParser, - ] = { - "conventional": ConventionalCommitParser( - options=ConventionalCommitParserOptions( - **{ - **default_conventional_parser.options.__dict__, - "parse_squash_commits": True, - } - ) - ), - "emoji": EmojiCommitParser( - options=EmojiParserOptions( - **{ - **default_emoji_parser.options.__dict__, - "parse_squash_commits": True, - } - ) - ), - "scipy": ScipyCommitParser( - options=ScipyParserOptions( - **{ - **default_scipy_parser.options.__dict__, - "parse_squash_commits": True, - } - ) - ), - } +def separate_squashed_commit_def() -> SeparateSquashedCommitDefFn: + # default_conventional_parser: ConventionalCommitParser, + # default_emoji_parser: EmojiCommitParser, + # default_scipy_parser: ScipyCommitParser, + # message_parsers: dict[ + # CommitConvention, + # ConventionalCommitParser | EmojiCommitParser | ScipyCommitParser, + # ] = { + # "conventional": ConventionalCommitParser( + # options=ConventionalCommitParserOptions( + # **{ + # **default_conventional_parser.options.__dict__, + # "parse_squash_commits": True, + # } + # ) + # ), + # "emoji": EmojiCommitParser( + # options=EmojiParserOptions( + # **{ + # **default_emoji_parser.options.__dict__, + # "parse_squash_commits": True, + # } + # ) + # ), + # "scipy": ScipyCommitParser( + # options=ScipyParserOptions( + # **{ + # **default_scipy_parser.options.__dict__, + # "parse_squash_commits": True, + # } + # ) + # ), + # } def _separate_squashed_commit_def( squashed_commit_def: CommitDef, + parser: SquashedCommitSupportedParser, ) -> list[CommitDef]: - commit_type: CommitConvention = "conventional" - for parser_name, parser in message_parsers.items(): - if squashed_commit_def["type"] in parser.options.allowed_tags: - commit_type = parser_name - - parser = message_parsers[commit_type] if not hasattr(parser, "unsquash_commit_message"): return [squashed_commit_def] @@ -1141,8 +1320,11 @@ def _separate_squashed_commit_def( 
message=squashed_commit_def["msg"] ) + commit_num_gen = (i for i in count(start=1, step=1)) + return [ { + "cid": f"{squashed_commit_def['cid']}-{next(commit_num_gen)}", "msg": squashed_message, "type": parsed_result.type, "category": parsed_result.category, @@ -1166,13 +1348,17 @@ def _separate_squashed_commit_def( @pytest.fixture(scope="session") def convert_commit_spec_to_commit_def( - get_commit_def_of_conventional_commit: GetCommitDefFn, - get_commit_def_of_emoji_commit: GetCommitDefFn, - get_commit_def_of_scipy_commit: GetCommitDefFn, + get_commit_def_of_conventional_commit: GetCommitDefFn[ConventionalCommitParser], + get_commit_def_of_conventional_commit_monorepo: GetCommitDefFn[ + ConventionalCommitMonorepoParser + ], + get_commit_def_of_emoji_commit: GetCommitDefFn[EmojiCommitParser], + get_commit_def_of_scipy_commit: GetCommitDefFn[ScipyCommitParser], stable_now_date: datetime, ) -> ConvertCommitSpecToCommitDefFn: - message_parsers: dict[CommitConvention, GetCommitDefFn] = { + message_parsers = { "conventional": get_commit_def_of_conventional_commit, + "conventional-monorepo": get_commit_def_of_conventional_commit_monorepo, "emoji": get_commit_def_of_emoji_commit, "scipy": get_commit_def_of_scipy_commit, } @@ -1180,12 +1366,18 @@ def convert_commit_spec_to_commit_def( def _convert( commit_spec: CommitSpec, commit_type: CommitConvention, + parser: CommitParser[ParseResult, ParserOptions], + monorepo: bool = False, ) -> CommitDef: - parse_msg_fn = message_parsers[commit_type] + parse_msg_fn = cast( + "GetCommitDefFn[Any]", + message_parsers[f"{commit_type}{'-monorepo' if monorepo else ''}"], + ) # Extract the correct commit message for the commit type return { - **parse_msg_fn(commit_spec[commit_type]), + **parse_msg_fn(commit_spec[commit_type], parser=parser), + "cid": commit_spec["cid"], "datetime": ( commit_spec["datetime"] if "datetime" in commit_spec @@ -1204,9 +1396,14 @@ def convert_commit_specs_to_commit_defs( def _convert( commits: 
Sequence[CommitSpec], commit_type: CommitConvention, + parser: CommitParser[ParseResult, ParserOptions], + monorepo: bool = False, ) -> Sequence[CommitDef]: return [ - convert_commit_spec_to_commit_def(commit, commit_type) for commit in commits + convert_commit_spec_to_commit_def( + commit, commit_type, parser=parser, monorepo=monorepo + ) + for commit in commits ] return _convert @@ -1215,6 +1412,8 @@ def _convert( @pytest.fixture(scope="session") def build_repo_from_definition( # noqa: C901, its required and its just test code build_configured_base_repo: BuildRepoFn, + build_base_monorepo: BuildMonorepoFn, + configure_monorepo_package: BuildRepoFn, default_tag_format_str: str, create_release_tagged_commit: CreateReleaseFn, create_squash_merge_commit: CreateSquashMergeCommitFn, @@ -1222,50 +1421,46 @@ def build_repo_from_definition( # noqa: C901, its required and its just test co simulate_change_commits_n_rtn_changelog_entry: SimulateChangeCommitsNReturnChangelogEntryFn, simulate_default_changelog_creation: SimulateDefaultChangelogCreationFn, separate_squashed_commit_def: SeparateSquashedCommitDefFn, + get_hvcs: GetHvcsFn, + example_git_https_url: str, + get_parser_from_config_file: GetParserFromConfigFileFn, ) -> BuildRepoFromDefinitionFn: def expand_repo_construction_steps( acc: Sequence[RepoActions], step: RepoActions ) -> Sequence[RepoActions]: - return [ - *acc, - *( - reduce( - expand_repo_construction_steps, # type: ignore[arg-type] - step["details"]["pre_actions"], - [], - ) - if "pre_actions" in step["details"] - else [] - ), - step, - *( - reduce( - expand_repo_construction_steps, # type: ignore[arg-type] - step["details"]["post_actions"], - [], - ) - if "post_actions" in step["details"] - else [] - ), - ] + empty_tuple = cast("tuple[RepoActions, ...]", ()) + unpacked_pre_actions = reduce( + expand_repo_construction_steps, # type: ignore[arg-type] + step["details"].pop("pre_actions", empty_tuple), + empty_tuple, + ) + + unpacked_post_actions = reduce( + 
expand_repo_construction_steps, # type: ignore[arg-type] + step["details"].pop("post_actions", empty_tuple), + empty_tuple, + ) + + return (*acc, *unpacked_pre_actions, step, *unpacked_post_actions) def _build_repo_from_definition( # noqa: C901, its required and its just test code dest_dir: Path | str, repo_construction_steps: Sequence[RepoActions] ) -> Sequence[RepoActions]: completed_repo_steps: list[RepoActions] = [] - expanded_repo_construction_steps: Sequence[RepoActions] = reduce( - expand_repo_construction_steps, - repo_construction_steps, - [], + expanded_repo_construction_steps: tuple[RepoActions, ...] = tuple( + reduce( + expand_repo_construction_steps, # type: ignore[arg-type] + repo_construction_steps, + (), + ) ) - repo_dir = Path(dest_dir) + repo_dir = Path(dest_dir).resolve().absolute() + commit_type: CommitConvention = "conventional" hvcs: Github | Gitlab | Gitea | Bitbucket - tag_format_str: str - mask_initial_release: bool = True # Default as of v10 - current_commits: list[CommitDef] = [] - current_repo_def: RepoDefinition = {} + commit_cache: dict[str, CommitDef] = {} + current_repo_def: dict[Version, RepoVersionDef] = {} with temporary_working_directory(repo_dir): for step in expanded_repo_construction_steps: @@ -1273,11 +1468,12 @@ def _build_repo_from_definition( # noqa: C901, its required and its just test c action = step["action"] if action == RepoActionStep.CONFIGURE: - cfg_def: RepoActionConfigureDetails = step_result["details"] # type: ignore[assignment] + cfg_def = cast("RepoActionConfigureDetails", step_result["details"]) # Make sure the resulting build definition is complete with the default - tag_format_str = cfg_def["tag_format_str"] or default_tag_format_str - cfg_def["tag_format_str"] = tag_format_str + cfg_def["tag_format_str"] = ( + cfg_def["tag_format_str"] or default_tag_format_str + ) _, hvcs = build_configured_base_repo( # type: ignore[assignment] # TODO: fix the type error dest_dir, @@ -1293,14 +1489,67 @@ def 
_build_repo_from_definition( # noqa: C901, its required and its just test c ] }, ) - # Save configuration details for later steps - mask_initial_release = cfg_def["mask_initial_release"] - # Make sure the resulting build definition is complete with the default - cfg_def["tag_format_str"] = tag_format_str + elif action == RepoActionStep.CREATE_MONOREPO: + cfg_mr_def = cast( + "RepoActionCreateMonorepoDetails", step_result["details"] + ) + build_base_monorepo(dest_dir=repo_dir) + hvcs = get_hvcs( + hvcs_client_name=cfg_mr_def["hvcs_client_name"], + origin_url=cfg_mr_def.get("origin_url") + or example_git_https_url, + hvcs_domain=cfg_mr_def["hvcs_domain"], + ) + commit_type = cfg_mr_def["commit_type"] + + elif action == RepoActionStep.CONFIGURE_MONOREPO: + cfg_mr_pkg_def = cast( + "RepoActionConfigureMonorepoDetails", step_result["details"] + ) + configure_monorepo_package( + dest_dir=cfg_mr_pkg_def["package_dir"], + commit_type=commit_type, + hvcs_client_name=hvcs.__class__.__name__.lower(), + hvcs_domain=str(hvcs.hvcs_domain), + tag_format_str=cfg_mr_pkg_def["tag_format_str"], + extra_configs=cfg_mr_pkg_def["extra_configs"], + mask_initial_release=cfg_mr_pkg_def["mask_initial_release"], + package_name=cfg_mr_pkg_def["package_name"], + monorepo=True, + ) + + elif action == RepoActionStep.CHANGE_DIRECTORY: + change_dir_def = cast( + "RepoActionChangeDirectoryDetails", step_result["details"] + ) + if not ( + new_cwd := Path(change_dir_def["directory"]) + .resolve() + .absolute() + ).exists(): + msg = f"Directory {change_dir_def['directory']} does not exist." + raise NotADirectoryError(msg) + + # Helpful Transform to find the project root repo without needing to pass it around (ie '/' => repo_dir) + new_cwd = ( + repo_dir if str(new_cwd) == str(repo_dir.root) else new_cwd + ) + + if not new_cwd.is_dir(): + msg = f"Path {change_dir_def['directory']} is not a directory." 
+ raise NotADirectoryError(msg) + + if not new_cwd.is_relative_to(repo_dir): + msg = f"Cannot change directory to '{new_cwd}' as it is not outside the repo directory '{repo_dir}'." + raise ValueError(msg) + + os.chdir(str(new_cwd)) elif action == RepoActionStep.MAKE_COMMITS: - mk_cmts_def: RepoActionMakeCommitsDetails = step_result["details"] # type: ignore[assignment] + mk_cmts_def = cast( + "RepoActionMakeCommitsDetails", step_result["details"] + ) # update the commit definitions with the repo hashes with Repo(repo_dir) as git_repo: @@ -1310,53 +1559,88 @@ def _build_repo_from_definition( # noqa: C901, its required and its just test c mk_cmts_def["commits"], ) ) - current_commits.extend( - filter( - lambda commit: commit["include_in_changelog"], - mk_cmts_def["commits"], - ) - ) + + commit_cache.update( + { + commit["cid"]: commit + for commit in mk_cmts_def["commits"] + if commit["include_in_changelog"] + } + ) elif action == RepoActionStep.WRITE_CHANGELOGS: - w_chlgs_def: RepoActionWriteChangelogsDetails = step["details"] # type: ignore[assignment] + w_chlgs_def = cast( + "RepoActionWriteChangelogsDetails", step["details"] + ) # Mark the repo definition with the latest stored commits for the upcoming release new_version = w_chlgs_def["new_version"] current_repo_def.update( - {new_version: {"commits": [*current_commits]}} + { + new_version: { + "commits": [ + *filter( + None, + ( + commit_cache.get(commit_id) + for commit_id in w_chlgs_def["commit_ids"] + ), + ) + ] + } + } ) - current_commits.clear() + + # in order to support monorepo changelogs we must filter and map the stored repo definition + # to match only the sub-package's versions which are identified by matching tag formats + filtered_repo_def_4_changelog: RepoDefinition = { + str(version): repo_def + for version, repo_def in current_repo_def.items() + if version.tag_format == new_version.tag_format + } # Write each changelog with the current repo definition - for changelog_file_def in 
w_chlgs_def["dest_files"]: - simulate_default_changelog_creation( - current_repo_def, - hvcs=hvcs, - dest_file=repo_dir.joinpath(changelog_file_def["path"]), - output_format=changelog_file_def["format"], - mask_initial_release=mask_initial_release, - max_version=w_chlgs_def.get("max_version", None), - ) + with Repo(repo_dir) as git_repo: + for changelog_file_def in w_chlgs_def["dest_files"]: + changelog_file = repo_dir.joinpath( + changelog_file_def["path"] + ) + simulate_default_changelog_creation( + filtered_repo_def_4_changelog, + hvcs=hvcs, + dest_file=changelog_file, + output_format=changelog_file_def["format"], + mask_initial_release=changelog_file_def[ + "mask_initial_release" + ], + max_version=w_chlgs_def.get("max_version"), + ) + + git_repo.git.add(str(changelog_file), force=True) elif action == RepoActionStep.RELEASE: - release_def: RepoActionReleaseDetails = step["details"] # type: ignore[assignment] + release_def = cast("RepoActionReleaseDetails", step["details"]) with Repo(repo_dir) as git_repo: create_release_tagged_commit( git_repo, version=release_def["version"], - tag_format=tag_format_str, + tag_format=release_def.get( + "tag_format", default_tag_format_str + ), timestamp=release_def["datetime"], + version_py_file=release_def.get("version_py_file", ""), + commit_message_format=release_def.get( + "commit_message_format", COMMIT_MESSAGE + ), ) elif action == RepoActionStep.GIT_CHECKOUT: - ckout_def: RepoActionGitCheckoutDetails = step["details"] # type: ignore[assignment] + ckout_def = cast("RepoActionGitCheckoutDetails", step["details"]) with Repo(repo_dir) as git_repo: if "create_branch" in ckout_def: - create_branch_def: RepoActionGitCheckoutCreateBranch = ( - ckout_def["create_branch"] - ) + create_branch_def = ckout_def["create_branch"] start_head = git_repo.heads[ create_branch_def["start_branch"] ] @@ -1370,7 +1654,9 @@ def _build_repo_from_definition( # noqa: C901, its required and its just test c 
git_repo.heads[ckout_def["branch"]].checkout() elif action == RepoActionStep.GIT_SQUASH: - squash_def: RepoActionGitSquashDetails = step_result["details"] # type: ignore[assignment] + squash_def = cast( + "RepoActionGitSquashDetails", step_result["details"] + ) # Update the commit definition with the repo hash with Repo(repo_dir) as git_repo: @@ -1381,26 +1667,35 @@ def _build_repo_from_definition( # noqa: C901, its required and its just test c strategy_option=squash_def["strategy_option"], ) if squash_def["commit_def"]["include_in_changelog"]: - current_commits.extend( - separate_squashed_commit_def( - squashed_commit_def=squash_def["commit_def"], - ) + commit_cache.update( + { + squashed_commit_def["cid"]: squashed_commit_def + for squashed_commit_def in separate_squashed_commit_def( + squashed_commit_def=squash_def["commit_def"], + parser=cast( + "SquashedCommitSupportedParser", + get_parser_from_config_file( + file=squash_def["config_file"], + ), + ), + ) + } ) elif action == RepoActionStep.GIT_MERGE: - this_step: RepoActionGitMerge = step_result # type: ignore[assignment] + this_step = cast("RepoActionGitMerge", step_result) with Repo(repo_dir) as git_repo: if this_step["details"]["fast_forward"]: - ff_merge_def: RepoActionGitFFMergeDetails = this_step[ # type: ignore[assignment] - "details" - ] + ff_merge_def = cast( + "RepoActionGitFFMergeDetails", this_step["details"] + ) git_repo.git.merge(ff_merge_def["branch_name"], ff=True) else: - merge_def: RepoActionGitMergeDetails = this_step[ # type: ignore[assignment] - "details" - ] + merge_def = cast( + "RepoActionGitMergeDetails", this_step["details"] + ) # Update the commit definition with the repo hash merge_def["commit_def"] = create_merge_commit( @@ -1413,7 +1708,13 @@ def _build_repo_from_definition( # noqa: C901, its required and its just test c ), ) if merge_def["commit_def"]["include_in_changelog"]: - current_commits.append(merge_def["commit_def"]) + commit_cache.update( + { + 
merge_def["commit_def"]["cid"]: merge_def[ + "commit_def" + ] + } + ) else: raise ValueError(f"Unknown action: {action}") @@ -1445,10 +1746,15 @@ def _get_cfg_value_from_def( @pytest.fixture(scope="session") -def get_versions_from_repo_build_def() -> GetVersionsFromRepoBuildDefFn: - def _get_versions(repo_def: Sequence[RepoActions]) -> Sequence[str]: +def get_versions_from_repo_build_def( + default_tag_format_str: str, +) -> GetVersionsFromRepoBuildDefFn: + def _get_versions(repo_def: Sequence[RepoActions]) -> Sequence[Version]: return [ - step["details"]["version"] + Version.parse( + step["details"]["version"], + tag_format=step["details"].get("tag_format", default_tag_format_str), + ) for step in repo_def if step["action"] == RepoActionStep.RELEASE ] @@ -1518,16 +1824,16 @@ def split_repo_actions_by_release_tags( ) -> SplitRepoActionsByReleaseTagsFn: def _split_repo_actions_by_release_tags( repo_definition: Sequence[RepoActions], - tag_format_str: str, - ) -> dict[str, list[RepoActions]]: - releasetags_2_steps: dict[str, list[RepoActions]] = { - "": [], + ) -> dict[Version | Literal["Unreleased"] | None, list[RepoActions]]: + releasetags_2_steps: dict[ + Version | Literal["Unreleased"] | None, list[RepoActions] + ] = { + None: [], } # Create generator for next release tags next_release_tag_gen = ( - tag_format_str.format(version=version) - for version in get_versions_from_repo_build_def(repo_definition) + version for version in get_versions_from_repo_build_def(repo_definition) ) # initialize the first release tag @@ -1536,8 +1842,15 @@ def _split_repo_actions_by_release_tags( # Loop through all actions and split them by release tags for step in repo_definition: - if step["action"] == RepoActionStep.CONFIGURE: - releasetags_2_steps[""].append(step) + if any( + step["action"] == action + for action in [ + RepoActionStep.CONFIGURE, + RepoActionStep.CREATE_MONOREPO, + RepoActionStep.CONFIGURE_MONOREPO, + ] + ): + releasetags_2_steps[None].append(step) continue if 
step["action"] == RepoActionStep.WRITE_CHANGELOGS: @@ -1553,19 +1866,17 @@ def _split_repo_actions_by_release_tags( curr_release_tag = "Unreleased" releasetags_2_steps[curr_release_tag] = [] - # Run filter on any non-action steps of Unreleased - releasetags_2_steps["Unreleased"] = list( - filter( - lambda step: step["action"] != RepoActionStep.GIT_CHECKOUT, - releasetags_2_steps["Unreleased"], - ) - ) + insignificant_actions = [ + RepoActionStep.GIT_CHECKOUT, + RepoActionStep.CHANGE_DIRECTORY, + ] - # Remove Unreleased if there are no steps in an Unreleased section - if ( - "Unreleased" in releasetags_2_steps - and not releasetags_2_steps["Unreleased"] - ): + # Remove Unreleased if there are no significant steps in an Unreleased section + if "Unreleased" in releasetags_2_steps and not [ + step + for step in releasetags_2_steps["Unreleased"] + if step["action"] not in insignificant_actions + ]: del releasetags_2_steps["Unreleased"] # Return all actions split up by release tags diff --git a/tests/fixtures/monorepos/__init__.py b/tests/fixtures/monorepos/__init__.py new file mode 100644 index 000000000..4beac474e --- /dev/null +++ b/tests/fixtures/monorepos/__init__.py @@ -0,0 +1,3 @@ +from tests.fixtures.monorepos.example_monorepo import * +from tests.fixtures.monorepos.git_monorepo import * +from tests.fixtures.monorepos.github_flow import * diff --git a/tests/fixtures/monorepos/example_monorepo.py b/tests/fixtures/monorepos/example_monorepo.py new file mode 100644 index 000000000..8b62a6f8e --- /dev/null +++ b/tests/fixtures/monorepos/example_monorepo.py @@ -0,0 +1,520 @@ +from __future__ import annotations + +from pathlib import Path +from textwrap import dedent +from typing import TYPE_CHECKING + +import pytest + +# NOTE: use backport with newer API +import tests.conftest +import tests.const +import tests.fixtures.example_project +import tests.util +from tests.const import ( + EXAMPLE_PROJECT_NAME, + EXAMPLE_PROJECT_VERSION, + EXAMPLE_PYPROJECT_TOML_CONTENT, + 
EXAMPLE_RELEASE_NOTES_TEMPLATE, +) +from tests.util import copy_dir_tree, temporary_working_directory + +if TYPE_CHECKING: + from typing import Any, Protocol, Sequence + + from tests.conftest import ( + BuildRepoOrCopyCacheFn, + GetMd5ForSetOfFilesFn, + ) + from tests.fixtures.example_project import ( + UpdatePyprojectTomlFn, + UpdateVersionPyFileFn, + ) + from tests.fixtures.git_repo import RepoActions + + # class GetWheelFileFn(Protocol): + # def __call__(self, version_str: str) -> Path: ... + + class UpdatePkgPyprojectTomlFn(Protocol): + def __call__(self, pkg_name: str, setting: str, value: Any) -> None: ... + + class UseCommonReleaseNotesTemplateFn(Protocol): + def __call__(self) -> None: ... + + +@pytest.fixture(scope="session") +def deps_files_4_example_monorepo() -> list[Path]: + return [ + # This file + Path(__file__).absolute(), + # because of imports + Path(tests.const.__file__).absolute(), + Path(tests.util.__file__).absolute(), + # because of the fixtures + Path(tests.conftest.__file__).absolute(), + Path(tests.fixtures.example_project.__file__).absolute(), + ] + + +@pytest.fixture(scope="session") +def build_spec_hash_4_example_monorepo( + get_md5_for_set_of_files: GetMd5ForSetOfFilesFn, + deps_files_4_example_monorepo: list[Path], +) -> str: + # Generates a hash of the build spec to set when to invalidate the cache + return get_md5_for_set_of_files(deps_files_4_example_monorepo) + + +@pytest.fixture(scope="session") +def cached_example_monorepo( + build_repo_or_copy_cache: BuildRepoOrCopyCacheFn, + monorepo_pkg1_dir: Path, + monorepo_pkg2_dir: Path, + monorepo_pkg1_version_py_file: Path, + monorepo_pkg2_version_py_file: Path, + monorepo_pkg1_pyproject_toml_file: Path, + monorepo_pkg2_pyproject_toml_file: Path, + build_spec_hash_4_example_monorepo: str, + update_version_py_file: UpdateVersionPyFileFn, + update_pyproject_toml: UpdatePyprojectTomlFn, +) -> Path: + """ + Initializes the example monorepo project. 
DO NOT USE DIRECTLY + + Use the `init_example_monorepo` fixture instead. + """ + + def _build_project(cached_project_path: Path) -> Sequence[RepoActions]: + # purposefully a relative path + # example_dir = version_py_file.parent + gitignore_contents = dedent( + f""" + *.pyc + /{monorepo_pkg1_version_py_file} + /{monorepo_pkg2_version_py_file} + dist/ + """ + ).lstrip() + init_py_contents = dedent( + ''' + """An example package with a very informative docstring.""" + from ._version import __version__ + + def hello_world() -> None: + print("{pkg_name} Hello World") + ''' + ).lstrip() + + with temporary_working_directory(cached_project_path): + update_version_py_file( + version=EXAMPLE_PROJECT_VERSION, + version_file=monorepo_pkg1_version_py_file, + ) + update_version_py_file( + version=EXAMPLE_PROJECT_VERSION, + version_file=monorepo_pkg2_version_py_file, + ) + + file_2_contents: list[tuple[str | Path, str]] = [ + ( + monorepo_pkg1_version_py_file.parent / "__init__.py", + init_py_contents.format(pkg_name="Pkg 1:"), + ), + ( + monorepo_pkg2_version_py_file.parent / "__init__.py", + init_py_contents.format(pkg_name="Pkg 2:"), + ), + (".gitignore", gitignore_contents), + (monorepo_pkg1_pyproject_toml_file, EXAMPLE_PYPROJECT_TOML_CONTENT), + (monorepo_pkg2_pyproject_toml_file, EXAMPLE_PYPROJECT_TOML_CONTENT), + ] + + for file, contents in file_2_contents: + abs_filepath = cached_project_path.joinpath(file).resolve() + # make sure the parent directory exists + abs_filepath.parent.mkdir(parents=True, exist_ok=True) + # write file contents + abs_filepath.write_text(contents) + + config_updates: list[tuple[str, Any, Path]] = [ + ( + "tool.poetry.name", + "pkg-1", + cached_project_path / monorepo_pkg1_pyproject_toml_file, + ), + ( + "tool.poetry.name", + "pkg-2", + cached_project_path / monorepo_pkg2_pyproject_toml_file, + ), + ( + "tool.semantic_release.version_variables", + [ + f"{monorepo_pkg1_version_py_file.relative_to(monorepo_pkg1_dir)}:__version__" + ], + 
cached_project_path / monorepo_pkg1_pyproject_toml_file, + ), + ( + "tool.semantic_release.version_variables", + [ + f"{monorepo_pkg2_version_py_file.relative_to(monorepo_pkg2_dir)}:__version__" + ], + cached_project_path / monorepo_pkg2_pyproject_toml_file, + ), + ] + + for setting, value, toml_file in config_updates: + update_pyproject_toml( + setting=setting, + value=value, + toml_file=toml_file, + ) + + # This is a special build, we don't expose the Repo Actions to the caller + return [] + + # End of _build_project() + + return build_repo_or_copy_cache( + repo_name="example_monorepo", + build_spec_hash=build_spec_hash_4_example_monorepo, + build_repo_func=_build_project, + ) + + +@pytest.fixture +def init_example_monorepo( + example_project_dir: tests.fixtures.example_project.ExProjectDir, + cached_example_monorepo: Path, + change_to_ex_proj_dir: None, +) -> None: + """This fixture initializes the example project in the current test's project directory.""" + if not cached_example_monorepo.exists(): + raise RuntimeError( + f"Unable to find cached project files for {EXAMPLE_PROJECT_NAME}" + ) + + # Copy the cached project files into the current test's project directory + copy_dir_tree(cached_example_monorepo, example_project_dir) + + +@pytest.fixture +def monorepo_project_w_common_release_notes_template( + init_example_monorepo: None, + monorepo_use_common_release_notes_template: UseCommonReleaseNotesTemplateFn, +) -> None: + monorepo_use_common_release_notes_template() + + +@pytest.fixture(scope="session") +def monorepo_pkg1_name() -> str: + return "pkg1" + + +@pytest.fixture(scope="session") +def monorepo_pkg2_name() -> str: + return "pkg2" + + +@pytest.fixture(scope="session") +def monorepo_pkg_dir_pattern() -> str: + return str(Path("packages", "{package_name}")) + + +@pytest.fixture(scope="session") +def monorepo_pkg1_dir( + monorepo_pkg1_name: str, + monorepo_pkg_dir_pattern: str, +) -> str: + return 
monorepo_pkg_dir_pattern.format(package_name=monorepo_pkg1_name) + + +@pytest.fixture(scope="session") +def monorepo_pkg2_dir( + monorepo_pkg2_name: str, + monorepo_pkg_dir_pattern: str, +) -> str: + return monorepo_pkg_dir_pattern.format(package_name=monorepo_pkg2_name) + + +@pytest.fixture(scope="session") +def monorepo_pkg_version_py_file_pattern(monorepo_pkg_dir_pattern: str) -> str: + return str(Path(monorepo_pkg_dir_pattern, "src", "{package_name}", "_version.py")) + + +@pytest.fixture(scope="session") +def monorepo_pkg1_version_py_file( + monorepo_pkg1_name: str, + monorepo_pkg_version_py_file_pattern: str, +) -> Path: + return Path( + monorepo_pkg_version_py_file_pattern.format(package_name=monorepo_pkg1_name) + ) + + +@pytest.fixture(scope="session") +def monorepo_pkg2_version_py_file( + monorepo_pkg2_name: str, + monorepo_pkg_version_py_file_pattern: str, +) -> Path: + return Path( + monorepo_pkg_version_py_file_pattern.format(package_name=monorepo_pkg2_name) + ) + + +@pytest.fixture(scope="session") +def monorepo_pkg_pyproject_toml_file_pattern(monorepo_pkg_dir_pattern: str) -> str: + return str(Path(monorepo_pkg_dir_pattern, "pyproject.toml")) + + +@pytest.fixture(scope="session") +def monorepo_pkg1_pyproject_toml_file( + monorepo_pkg1_name: str, + monorepo_pkg_pyproject_toml_file_pattern: str, +) -> Path: + return Path( + monorepo_pkg_pyproject_toml_file_pattern.format(package_name=monorepo_pkg1_name) + ) + + +@pytest.fixture(scope="session") +def monorepo_pkg2_pyproject_toml_file( + monorepo_pkg2_name: str, + monorepo_pkg_pyproject_toml_file_pattern: str, +) -> Path: + return Path( + monorepo_pkg_pyproject_toml_file_pattern.format(package_name=monorepo_pkg2_name) + ) + + +@pytest.fixture(scope="session") +def monorepo_pkg_dist_dir_pattern(monorepo_pkg_dir_pattern: str) -> str: + return str(Path(monorepo_pkg_dir_pattern, "dist")) + + +@pytest.fixture(scope="session") +def monorepo_pkg1_dist_dir( + monorepo_pkg1_name: str, + 
monorepo_pkg_dist_dir_pattern: str, +) -> Path: + return Path(monorepo_pkg_dist_dir_pattern.format(package_name=monorepo_pkg1_name)) + + +@pytest.fixture(scope="session") +def monorepo_pkg2_dist_dir( + monorepo_pkg2_name: str, + monorepo_pkg_dist_dir_pattern: str, +) -> Path: + return Path(monorepo_pkg_dist_dir_pattern.format(package_name=monorepo_pkg2_name)) + + +@pytest.fixture(scope="session") +def monorepo_pkg_changelog_md_file_pattern(monorepo_pkg_dir_pattern: str) -> str: + return str(Path(monorepo_pkg_dir_pattern, "CHANGELOG.md")) + + +@pytest.fixture(scope="session") +def monorepo_pkg1_changelog_md_file( + monorepo_pkg1_name: str, + monorepo_pkg_changelog_md_file_pattern: str, +) -> Path: + return Path( + monorepo_pkg_changelog_md_file_pattern.format(package_name=monorepo_pkg1_name) + ) + + +@pytest.fixture(scope="session") +def monorepo_pkg2_changelog_md_file( + monorepo_pkg2_name: str, + monorepo_pkg_changelog_md_file_pattern: str, +) -> Path: + return Path( + monorepo_pkg_changelog_md_file_pattern.format(package_name=monorepo_pkg2_name) + ) + + +@pytest.fixture(scope="session") +def monorepo_pkg_changelog_rst_file_pattern(monorepo_pkg_dir_pattern: str) -> str: + return str(Path(monorepo_pkg_dir_pattern, "CHANGELOG.rst")) + + +@pytest.fixture(scope="session") +def monorepo_pkg1_changelog_rst_file( + monorepo_pkg1_name: str, + monorepo_pkg_changelog_rst_file_pattern: str, +) -> Path: + return Path( + monorepo_pkg_changelog_rst_file_pattern.format(package_name=monorepo_pkg1_name) + ) + + +@pytest.fixture(scope="session") +def monorepo_pkg2_changelog_rst_file( + monorepo_pkg2_name: str, + monorepo_pkg_changelog_rst_file_pattern: str, +) -> Path: + return Path( + monorepo_pkg_changelog_rst_file_pattern.format(package_name=monorepo_pkg2_name) + ) + + +# @pytest.fixture(scope="session") +# def get_wheel_file(dist_dir: Path) -> GetWheelFileFn: +# def _get_wheel_file(version_str: str) -> Path: +# return dist_dir / 
f"{EXAMPLE_PROJECT_NAME}-{version_str}-py3-none-any.whl" + +# return _get_wheel_file + + +@pytest.fixture +def example_monorepo_pkg_dir_pattern( + tmp_path: Path, + monorepo_pkg_dir_pattern: Path, +) -> str: + return str(tmp_path.resolve() / monorepo_pkg_dir_pattern) + + +@pytest.fixture +def example_monorepo_pkg1_dir( + monorepo_pkg1_name: str, + example_monorepo_pkg_dir_pattern: str, +) -> Path: + return Path( + example_monorepo_pkg_dir_pattern.format(package_name=monorepo_pkg1_name) + ) + + +@pytest.fixture +def example_monorepo_pkg2_dir( + monorepo_pkg2_name: str, + example_monorepo_pkg_dir_pattern: str, +) -> Path: + return Path( + example_monorepo_pkg_dir_pattern.format(package_name=monorepo_pkg2_name) + ) + + +@pytest.fixture +def monorepo_use_common_release_notes_template( + example_project_template_dir: Path, + changelog_template_dir: Path, + update_pyproject_toml: UpdatePyprojectTomlFn, + monorepo_pkg1_pyproject_toml_file: Path, + monorepo_pkg2_pyproject_toml_file: Path, +) -> UseCommonReleaseNotesTemplateFn: + config_setting_template_dir = "tool.semantic_release.changelog.template_dir" + + def _use_release_notes_template() -> None: + update_pyproject_toml( + setting=config_setting_template_dir, + value=str( + Path( + *( + "../" + for _ in list(Path(monorepo_pkg1_pyproject_toml_file).parents)[ + :-1 + ] + ), + changelog_template_dir, + ) + ), + toml_file=monorepo_pkg1_pyproject_toml_file, + ) + + update_pyproject_toml( + setting=config_setting_template_dir, + value=str( + Path( + *( + "../" + for _ in list(Path(monorepo_pkg2_pyproject_toml_file).parents)[ + :-1 + ] + ), + changelog_template_dir, + ) + ), + toml_file=monorepo_pkg2_pyproject_toml_file, + ) + + example_project_template_dir.mkdir(parents=True, exist_ok=True) + release_notes_j2 = example_project_template_dir / ".release_notes.md.j2" + release_notes_j2.write_text(EXAMPLE_RELEASE_NOTES_TEMPLATE) + + return _use_release_notes_template + + +# @pytest.fixture +# def example_pyproject_toml( +# 
example_project_dir: ExProjectDir, +# pyproject_toml_file: Path, +# ) -> Path: +# return example_project_dir / pyproject_toml_file + + +# @pytest.fixture +# def example_dist_dir( +# example_project_dir: ExProjectDir, +# dist_dir: Path, +# ) -> Path: +# return example_project_dir / dist_dir + + +# @pytest.fixture +# def example_project_wheel_file( +# example_dist_dir: Path, +# get_wheel_file: GetWheelFileFn, +# ) -> Path: +# return example_dist_dir / get_wheel_file(EXAMPLE_PROJECT_VERSION) + + +# Note this is just the path and the content may change +# @pytest.fixture +# def example_changelog_md( +# example_project_dir: ExProjectDir, +# changelog_md_file: Path, +# ) -> Path: +# return example_project_dir / changelog_md_file + + +# Note this is just the path and the content may change +# @pytest.fixture +# def example_changelog_rst( +# example_project_dir: ExProjectDir, +# changelog_rst_file: Path, +# ) -> Path: +# return example_project_dir / changelog_rst_file + + +# @pytest.fixture +# def example_project_template_dir( +# example_project_dir: ExProjectDir, +# changelog_template_dir: Path, +# ) -> Path: +# return example_project_dir / changelog_template_dir + + +@pytest.fixture(scope="session") +def update_pkg_pyproject_toml( + update_pyproject_toml: UpdatePyprojectTomlFn, + monorepo_pkg_pyproject_toml_file_pattern: str, +) -> UpdatePkgPyprojectTomlFn: + """Update the pyproject.toml file with the given content.""" + + def _update_pyproject_toml(pkg_name: str, setting: str, value: Any) -> None: + toml_file = Path( + monorepo_pkg_pyproject_toml_file_pattern.format(package_name=pkg_name) + ).resolve() + + if not toml_file.exists(): + raise FileNotFoundError( + f"pyproject.toml file for package {pkg_name} not found at {toml_file}" + ) + + update_pyproject_toml( + setting=setting, + value=value, + toml_file=toml_file, + ) + + return _update_pyproject_toml diff --git a/tests/fixtures/monorepos/git_monorepo.py b/tests/fixtures/monorepos/git_monorepo.py new file mode 100644 
index 000000000..c88fbdb29 --- /dev/null +++ b/tests/fixtures/monorepos/git_monorepo.py @@ -0,0 +1,206 @@ +from __future__ import annotations + +from pathlib import Path +from shutil import rmtree +from typing import TYPE_CHECKING + +import pytest +from git import Repo + +import tests.conftest +import tests.const +import tests.fixtures.git_repo +import tests.util +from tests.const import ( + DEFAULT_BRANCH_NAME, + EXAMPLE_HVCS_DOMAIN, + EXAMPLE_PROJECT_NAME, +) +from tests.util import copy_dir_tree + +if TYPE_CHECKING: + from typing import Protocol, Sequence + + from git import Actor + + from semantic_release.hvcs import HvcsBase + + from tests.conftest import ( + BuildRepoOrCopyCacheFn, + GetMd5ForSetOfFilesFn, + RepoActions, + ) + from tests.fixtures.git_repo import ( + BuildRepoFn, + CommitConvention, + TomlSerializableTypes, + ) + + class BuildMonorepoFn(Protocol): + def __call__(self, dest_dir: Path | str) -> Path: ... + + +@pytest.fixture(scope="session") +def deps_files_4_example_git_monorepo( + deps_files_4_example_monorepo: list[Path], +) -> list[Path]: + return [ + *deps_files_4_example_monorepo, + # This file + Path(__file__).absolute(), + # because of imports + Path(tests.const.__file__).absolute(), + Path(tests.util.__file__).absolute(), + # because of the fixtures + Path(tests.conftest.__file__).absolute(), + Path(tests.fixtures.git_repo.__file__).absolute(), + ] + + +@pytest.fixture(scope="session") +def build_spec_hash_4_example_git_monorepo( + get_md5_for_set_of_files: GetMd5ForSetOfFilesFn, + deps_files_4_example_git_monorepo: list[Path], +) -> str: + # Generates a hash of the build spec to set when to invalidate the cache + return get_md5_for_set_of_files(deps_files_4_example_git_monorepo) + + +@pytest.fixture(scope="session") +def cached_example_git_monorepo( + build_repo_or_copy_cache: BuildRepoOrCopyCacheFn, + build_spec_hash_4_example_git_monorepo: str, + cached_example_monorepo: Path, + example_git_https_url: str, + commit_author: Actor, +) 
-> Path: + """ + Initializes an example monorepo project with git. DO NOT USE DIRECTLY. + + Use a `repo_*` fixture instead. This creates a default + base repository, all settings can be changed later through from the + example_project_git_repo fixture's return object and manual adjustment. + """ + + def _build_repo(cached_repo_path: Path) -> Sequence[RepoActions]: + if not cached_example_monorepo.exists(): + raise RuntimeError("Unable to find cached monorepo files") + + # make a copy of the example monorepo as a base + copy_dir_tree(cached_example_monorepo, cached_repo_path) + + # initialize git repo (open and close) + # NOTE: We don't want to hold the repo object open for the entire test session, + # the implementation on Windows holds some file descriptors open until close is called. + with Repo.init(cached_repo_path) as repo: + rmtree(str(Path(repo.git_dir, "hooks"))) + # Without this the global config may set it to "master", we want consistency + repo.git.branch("-M", DEFAULT_BRANCH_NAME) + with repo.config_writer("repository") as config: + config.set_value("user", "name", commit_author.name) + config.set_value("user", "email", commit_author.email) + config.set_value("commit", "gpgsign", False) + config.set_value("tag", "gpgsign", False) + + repo.create_remote(name="origin", url=example_git_https_url) + + # make sure all base files are in index to enable initial commit + repo.index.add(("*", ".gitignore")) + + # This is a special build, we don't expose the Repo Actions to the caller + return [] + + # End of _build_repo() + + return build_repo_or_copy_cache( + repo_name=cached_example_git_monorepo.__name__.split("_", maxsplit=1)[1], + build_spec_hash=build_spec_hash_4_example_git_monorepo, + build_repo_func=_build_repo, + ) + + +@pytest.fixture(scope="session") +def file_in_pkg_pattern(file_in_repo: str, monorepo_pkg_dir_pattern: str) -> str: + return str(Path(monorepo_pkg_dir_pattern) / file_in_repo) + + +@pytest.fixture(scope="session") +def 
file_in_monorepo_pkg1( +    monorepo_pkg1_name: str, +    file_in_pkg_pattern: str, +) -> Path: +    return Path(file_in_pkg_pattern.format(package_name=monorepo_pkg1_name)) + + +@pytest.fixture(scope="session") +def file_in_monorepo_pkg2( +    monorepo_pkg2_name: str, +    file_in_pkg_pattern: str, +) -> Path: +    return Path(file_in_pkg_pattern.format(package_name=monorepo_pkg2_name)) + + +@pytest.fixture(scope="session") +def build_base_monorepo(  # noqa: C901 +    cached_example_git_monorepo: Path, +) -> BuildMonorepoFn: +    """ +    This fixture is intended to simplify repo scenario building by initially +    creating the repo but also configuring semantic_release in the pyproject.toml +    for when the test executes semantic_release. It returns a function so that +    derivative fixtures can call this fixture with individual parameters. +    """ + +    def _build_configured_base_monorepo(dest_dir: Path | str) -> Path: +        if not cached_example_git_monorepo.exists(): +            raise RuntimeError("Unable to find cached git project files!") + +        # Copy the cached git project to the dest directory +        copy_dir_tree(cached_example_git_monorepo, dest_dir) + +        return Path(dest_dir) + +    return _build_configured_base_monorepo + + +@pytest.fixture(scope="session") +def configure_monorepo_package(  # noqa: C901 +    configure_base_repo: BuildRepoFn, +) -> BuildRepoFn: +    """ +    This fixture is intended to simplify repo scenario building by initially +    creating the repo but also configuring semantic_release in the pyproject.toml +    for when the test executes semantic_release. It returns a function so that +    derivative fixtures can call this fixture with individual parameters. 
+ """ + + def _configure( # noqa: C901 + dest_dir: Path | str, + commit_type: CommitConvention = "conventional", + hvcs_client_name: str = "github", + hvcs_domain: str = EXAMPLE_HVCS_DOMAIN, + tag_format_str: str | None = None, + extra_configs: dict[str, TomlSerializableTypes] | None = None, + mask_initial_release: bool = True, # Default as of v10 + package_name: str = EXAMPLE_PROJECT_NAME, + monorepo: bool = True, + ) -> tuple[Path, HvcsBase]: + if not monorepo: + raise ValueError("This fixture is only for monorepo packages!") + + if not Path(dest_dir).exists(): + raise RuntimeError(f"Destination directory {dest_dir} does not exist!") + + return configure_base_repo( + dest_dir=dest_dir, + commit_type=commit_type, + hvcs_client_name=hvcs_client_name, + hvcs_domain=hvcs_domain, + tag_format_str=tag_format_str, + extra_configs=extra_configs, + mask_initial_release=mask_initial_release, + package_name=package_name, + monorepo=monorepo, + ) + + return _configure diff --git a/tests/fixtures/monorepos/github_flow/__init__.py b/tests/fixtures/monorepos/github_flow/__init__.py new file mode 100644 index 000000000..1475e75a8 --- /dev/null +++ b/tests/fixtures/monorepos/github_flow/__init__.py @@ -0,0 +1 @@ +from tests.fixtures.monorepos.github_flow.monorepo_w_default_release import * diff --git a/tests/fixtures/monorepos/github_flow/monorepo_w_default_release.py b/tests/fixtures/monorepos/github_flow/monorepo_w_default_release.py new file mode 100644 index 000000000..706afa1c7 --- /dev/null +++ b/tests/fixtures/monorepos/github_flow/monorepo_w_default_release.py @@ -0,0 +1,946 @@ +from __future__ import annotations + +from datetime import timedelta +from itertools import count +from pathlib import Path +from textwrap import dedent +from typing import TYPE_CHECKING, cast + +import pytest + +from semantic_release.cli.config import ChangelogOutputFormat +from semantic_release.commit_parser.conventional.options_monorepo import ( + ConventionalCommitMonorepoParserOptions, +) 
+from semantic_release.commit_parser.conventional.parser_monorepo import ( + ConventionalCommitMonorepoParser, +) +from semantic_release.version.version import Version + +import tests.conftest +import tests.const +import tests.util +from tests.const import ( + DEFAULT_BRANCH_NAME, + EXAMPLE_HVCS_DOMAIN, + INITIAL_COMMIT_MESSAGE, + RepoActionStep, +) + +if TYPE_CHECKING: + from typing import Sequence + + from semantic_release.commit_parser._base import CommitParser, ParserOptions + from semantic_release.commit_parser.token import ParseResult + + from tests.conftest import ( + GetCachedRepoDataFn, + GetMd5ForSetOfFilesFn, + GetStableDateNowFn, + ) + from tests.fixtures.example_project import ExProjectDir + from tests.fixtures.git_repo import ( + BuildRepoFromDefinitionFn, + BuildRepoOrCopyCacheFn, + BuildSpecificRepoFn, + BuiltRepoResult, + CommitConvention, + CommitSpec, + ConvertCommitSpecsToCommitDefsFn, + ConvertCommitSpecToCommitDefFn, + ExProjectGitRepoFn, + FormatGitHubSquashCommitMsgFn, + GetRepoDefinitionFn, + RepoActionChangeDirectory, + RepoActions, + RepoActionWriteChangelogsDestFile, + TomlSerializableTypes, + ) + + +@pytest.fixture(scope="session") +def deps_files_4_github_flow_monorepo_w_default_release_channel( + deps_files_4_example_git_monorepo: list[Path], +) -> list[Path]: + return [ + *deps_files_4_example_git_monorepo, + # This file + Path(__file__).absolute(), + # because of imports + Path(tests.const.__file__).absolute(), + Path(tests.util.__file__).absolute(), + # because of the fixtures + Path(tests.conftest.__file__).absolute(), + ] + + +@pytest.fixture(scope="session") +def build_spec_hash_4_github_flow_monorepo_w_default_release_channel( + get_md5_for_set_of_files: GetMd5ForSetOfFilesFn, + deps_files_4_github_flow_monorepo_w_default_release_channel: list[Path], +) -> str: + # Generates a hash of the build spec to set when to invalidate the cache + return get_md5_for_set_of_files( + 
deps_files_4_github_flow_monorepo_w_default_release_channel + ) + + +@pytest.fixture(scope="session") +def get_repo_definition_4_github_flow_monorepo_w_default_release_channel( + convert_commit_specs_to_commit_defs: ConvertCommitSpecsToCommitDefsFn, + convert_commit_spec_to_commit_def: ConvertCommitSpecToCommitDefFn, + format_squash_commit_msg_github: FormatGitHubSquashCommitMsgFn, + monorepo_pkg1_changelog_md_file: Path, + monorepo_pkg1_changelog_rst_file: Path, + monorepo_pkg2_changelog_md_file: Path, + monorepo_pkg2_changelog_rst_file: Path, + monorepo_pkg1_name: str, + monorepo_pkg2_name: str, + monorepo_pkg1_dir: Path, + monorepo_pkg2_dir: Path, + monorepo_pkg1_version_py_file: Path, + monorepo_pkg2_version_py_file: Path, + monorepo_pkg1_pyproject_toml_file: Path, + monorepo_pkg2_pyproject_toml_file: Path, + stable_now_date: GetStableDateNowFn, + default_tag_format_str: str, +) -> GetRepoDefinitionFn: + """ + Builds a Monorepo with the GitHub Flow branching strategy and a squash commit merging strategy + for a single release channel on the default branch. + + Implementation: + - The monorepo contains two packages, each with its own internal changelog but shared template. 
+ - The repository implements the following git graph: + + ``` + * chore(release): pkg-1@1.1.0 [skip ci] (tag: pkg1-v1.1.0) + * feat(pkg1): file modified outside of pkg 1, identified by scope (#5) + | + | * feat(pkg1): file modified outside of pkg 1, identified by scope (branch: pkg1/feat/pr-4) + |/ + * chore(release): pkg-2@1.1.1 [skip ci] (HEAD -> main, tag: pkg2-v1.1.1) + * fix(pkg2-cli): file modified outside of pkg 2, identified by scope (#4) + | + | * fix(pkg2-cli): file modified outside of pkg 2, identified by scope (branch: pkg2/fix/pr-3) + |/ + * chore(release): pkg-2@1.1.0 [skip ci] (tag: pkg2-v1.1.0) + * feat: no pkg scope but file in pkg 2 directory (#3) # Squash merge of pkg2/feat/pr-2 + * chore(release): pkg-1@1.0.1 [skip ci] (tag: pkg1-v1.0.1) + * fix: no pkg scope but file in pkg 1 directory (#2) # Squash merge of pkg1/fix/pr-1 + | + | * docs(cli): add cli documentation + | * test(cli): add cli tests + | * feat: no pkg scope but file in pkg 2 directory (branch: pkg2/feat/pr-2) + |/ + | * fix: no pkg scope but file in pkg 1 directory (branch: pkg1/fix/pr-1) + |/ + * chore(release): pkg-2@1.0.0 [skip ci] (tag: pkg2-v1.0.0) # Initial release of pkg 2 + * chore(release): pkg-1@1.0.0 [skip ci] (tag: pkg1-v1.0.0) # Initial release of pkg 1 + * Initial commit # Includes core functionality for both packages + ``` + """ + + def _get_repo_from_definition( + commit_type: CommitConvention, + hvcs_client_name: str = "github", + hvcs_domain: str = EXAMPLE_HVCS_DOMAIN, + tag_format_str: str | None = default_tag_format_str, + extra_configs: dict[str, TomlSerializableTypes] | None = None, + mask_initial_release: bool = True, + ignore_merge_commits: bool = True, + ) -> Sequence[RepoActions]: + stable_now_datetime = stable_now_date() + commit_timestamp_gen = ( + (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds") + for i in count(step=1) + ) + pr_num_gen = (i for i in count(start=2, step=1)) + + pkg1_changelog_file_definitions: 
Sequence[RepoActionWriteChangelogsDestFile] = [ + { + "path": monorepo_pkg1_changelog_md_file, + "format": ChangelogOutputFormat.MARKDOWN, + "mask_initial_release": True, + }, + { + "path": monorepo_pkg1_changelog_rst_file, + "format": ChangelogOutputFormat.RESTRUCTURED_TEXT, + "mask_initial_release": True, + }, + ] + + pkg2_changelog_file_definitions: Sequence[RepoActionWriteChangelogsDestFile] = [ + { + "path": monorepo_pkg2_changelog_md_file, + "format": ChangelogOutputFormat.MARKDOWN, + "mask_initial_release": True, + }, + { + "path": monorepo_pkg2_changelog_rst_file, + "format": ChangelogOutputFormat.RESTRUCTURED_TEXT, + "mask_initial_release": True, + }, + ] + + change_to_pkg1_dir: RepoActionChangeDirectory = { + "action": RepoActionStep.CHANGE_DIRECTORY, + "details": { + "directory": monorepo_pkg1_dir, + }, + } + + change_to_pkg2_dir: RepoActionChangeDirectory = { + "action": RepoActionStep.CHANGE_DIRECTORY, + "details": { + "directory": monorepo_pkg2_dir, + }, + } + + change_to_example_project_dir: RepoActionChangeDirectory = { + "action": RepoActionStep.CHANGE_DIRECTORY, + "details": { + "directory": "/", + }, + } + + if commit_type != "conventional": + raise ValueError(f"Unsupported commit type: {commit_type}") + + pkg1_commit_parser = ConventionalCommitMonorepoParser( + options=ConventionalCommitMonorepoParserOptions( + parse_squash_commits=True, + ignore_merge_commits=ignore_merge_commits, + scope_prefix=f"{monorepo_pkg1_name}-?", + path_filters=(".",), + ) + ) + + pkg2_commit_parser = ConventionalCommitMonorepoParser( + options=ConventionalCommitMonorepoParserOptions( + parse_squash_commits=pkg1_commit_parser.options.parse_squash_commits, + ignore_merge_commits=pkg1_commit_parser.options.ignore_merge_commits, + scope_prefix=f"{monorepo_pkg2_name}-?", + path_filters=(".",), + ) + ) + + common_configs: dict[str, TomlSerializableTypes] = { + # Set the default release branch + "tool.semantic_release.branches.main": { + "match": r"^(main|master)$", + 
"prerelease": False, + }, + "tool.semantic_release.allow_zero_version": False, + "tool.semantic_release.changelog.exclude_commit_patterns": [r"^chore"], + "tool.semantic_release.commit_parser": f"{commit_type}-monorepo", + "tool.semantic_release.commit_parser_options.parse_squash_commits": pkg1_commit_parser.options.parse_squash_commits, + "tool.semantic_release.commit_parser_options.ignore_merge_commits": pkg1_commit_parser.options.ignore_merge_commits, + } + + mr1_pkg1_fix_branch_name = f"{monorepo_pkg1_name}/fix/pr-1" + mr2_pkg2_feat_branch_name = f"{monorepo_pkg2_name}/feat/pr-2" + mr3_pkg2_fix_branch_name = f"{monorepo_pkg2_name}/fix/pr-3" + mr4_pkg1_feat_branch_name = f"{monorepo_pkg1_name}/feat/pr-4" + + repo_construction_steps: list[RepoActions] = [ + { + "action": RepoActionStep.CREATE_MONOREPO, + "details": { + "commit_type": commit_type, + "hvcs_client_name": hvcs_client_name, + "hvcs_domain": hvcs_domain, + "post_actions": [ + { + "action": RepoActionStep.CONFIGURE_MONOREPO, + "details": { + "package_dir": monorepo_pkg1_dir, + "package_name": monorepo_pkg1_name, + "tag_format_str": ( + pkg1_tag_format_str + := f"{monorepo_pkg1_name}-{tag_format_str}" + ), + "mask_initial_release": mask_initial_release, + "extra_configs": { + **common_configs, + "tool.semantic_release.commit_message": ( + pkg1_cmt_msg_format := dedent( + f"""\ + chore(release): {monorepo_pkg1_name}@{{version}} [skip ci] + + Automatically generated by python-semantic-release + """ + ) + ), + "tool.semantic_release.commit_parser_options.scope_prefix": pkg1_commit_parser.options.scope_prefix, + "tool.semantic_release.commit_parser_options.path_filters": pkg1_commit_parser.options.path_filters, + **(extra_configs or {}), + }, + }, + }, + { + "action": RepoActionStep.CONFIGURE_MONOREPO, + "details": { + "package_dir": monorepo_pkg2_dir, + "package_name": monorepo_pkg2_name, + "tag_format_str": ( + pkg2_tag_format_str + := f"{monorepo_pkg2_name}-{tag_format_str}" + ), + "mask_initial_release": 
mask_initial_release, + "extra_configs": { + **common_configs, + "tool.semantic_release.commit_message": ( + pkg2_cmt_msg_format := dedent( + f"""\ + chore(release): {monorepo_pkg2_name}@{{version}} [skip ci] + + Automatically generated by python-semantic-release + """ + ) + ), + "tool.semantic_release.commit_parser_options.scope_prefix": pkg2_commit_parser.options.scope_prefix, + "tool.semantic_release.commit_parser_options.path_filters": pkg2_commit_parser.options.path_filters, + **(extra_configs or {}), + }, + }, + }, + { + "action": RepoActionStep.MAKE_COMMITS, + "details": { + "commits": convert_commit_specs_to_commit_defs( + [ + { + "cid": "initial", + "conventional": INITIAL_COMMIT_MESSAGE, + "emoji": INITIAL_COMMIT_MESSAGE, + "scipy": INITIAL_COMMIT_MESSAGE, + "datetime": next(commit_timestamp_gen), + "include_in_changelog": bool( + commit_type == "emoji" + ), + }, + ], + commit_type, + # this parser does not matter since the commit is common + parser=cast( + "CommitParser[ParseResult, ParserOptions]", + pkg1_commit_parser, + ), + monorepo=True, + ), + }, + }, + ], + }, + } + ] + + pkg1_new_version = Version.parse("1.0.0", tag_format=pkg1_tag_format_str) + pkg2_new_version = Version.parse("1.0.0", tag_format=pkg2_tag_format_str) + + repo_construction_steps.extend( + [ + { + "action": RepoActionStep.RELEASE, + "details": { + "version": str(pkg1_new_version), + "datetime": next(commit_timestamp_gen), + "tag_format": pkg1_tag_format_str, + "version_py_file": monorepo_pkg1_version_py_file.relative_to( + monorepo_pkg1_dir + ), + "commit_message_format": pkg1_cmt_msg_format, + "pre_actions": [ + { + "action": RepoActionStep.WRITE_CHANGELOGS, + "details": { + "new_version": pkg1_new_version, + "dest_files": pkg1_changelog_file_definitions, + "commit_ids": ["initial"], + }, + }, + change_to_pkg1_dir, + ], + "post_actions": [change_to_example_project_dir], + }, + }, + { + "action": RepoActionStep.RELEASE, + "details": { + "version": str(pkg2_new_version), + 
"datetime": next(commit_timestamp_gen), + "tag_format": pkg2_tag_format_str, + "version_py_file": monorepo_pkg2_version_py_file.relative_to( + monorepo_pkg2_dir + ), + "commit_message_format": pkg2_cmt_msg_format, + "pre_actions": [ + { + "action": RepoActionStep.WRITE_CHANGELOGS, + "details": { + "new_version": pkg2_new_version, + "dest_files": pkg2_changelog_file_definitions, + "commit_ids": ["initial"], + }, + }, + change_to_pkg2_dir, + ], + "post_actions": [change_to_example_project_dir], + }, + }, + ] + ) + + pkg1_fix_branch_commits: Sequence[CommitSpec] = [ + { + "cid": "pkg1-fix-1-squashed", + "conventional": "fix: no pkg scope but file in pkg 1 directory\n\nResolves: #123\n", + "emoji": ":bug: no pkg scope but file in pkg 1 directory\n\nResolves: #123\n", + "scipy": "MAINT: no pkg scope but file in pkg 1 directory\n\nResolves: #123\n", + "datetime": next(commit_timestamp_gen), + }, + ] + + repo_construction_steps.extend( + [ + { + "action": RepoActionStep.GIT_CHECKOUT, + "details": { + "create_branch": { + "name": mr1_pkg1_fix_branch_name, + "start_branch": DEFAULT_BRANCH_NAME, + } + }, + }, + { + "action": RepoActionStep.MAKE_COMMITS, + "details": { + "pre_actions": [change_to_pkg1_dir], + "commits": convert_commit_specs_to_commit_defs( + [ + { + **commit, + "include_in_changelog": False, + } + for commit in pkg1_fix_branch_commits + ], + commit_type, + parser=cast( + "CommitParser[ParseResult, ParserOptions]", + pkg1_commit_parser, + ), + monorepo=True, + ), + "post_actions": [change_to_example_project_dir], + }, + }, + ] + ) + + # simulate separate work by another person at same time as the fix branch + pkg2_feat_branch_commits: Sequence[CommitSpec] = [ + { + "cid": "pkg2-feat-1-squashed", + "conventional": "feat: no pkg scope but file in pkg 2 directory", + "emoji": ":sparkles: no pkg scope but file in pkg 2 directory", + "scipy": "ENH: no pkg scope but file in pkg 2 directory", + "datetime": next(commit_timestamp_gen), + }, + { + "cid": 
"pkg2-feat-2-squashed", + "conventional": "test(cli): add cli tests", + "emoji": ":checkmark: add cli tests", + "scipy": "TST: add cli tests", + "datetime": next(commit_timestamp_gen), + }, + { + "cid": "pkg2-feat-3-squashed", + "conventional": "docs(cli): add cli documentation", + "emoji": ":memo: add cli documentation", + "scipy": "DOC: add cli documentation", + "datetime": next(commit_timestamp_gen), + }, + ] + + repo_construction_steps.extend( + [ + { + "action": RepoActionStep.GIT_CHECKOUT, + "details": { + "create_branch": { + "name": mr2_pkg2_feat_branch_name, + "start_branch": DEFAULT_BRANCH_NAME, + }, + }, + }, + { + "action": RepoActionStep.MAKE_COMMITS, + "details": { + "pre_actions": [change_to_pkg2_dir], + "commits": convert_commit_specs_to_commit_defs( + [ + { + **commit, + "include_in_changelog": False, + } + for commit in pkg2_feat_branch_commits + ], + commit_type, + parser=cast( + "CommitParser[ParseResult, ParserOptions]", + pkg2_commit_parser, + ), + monorepo=True, + ), + "post_actions": [change_to_example_project_dir], + }, + }, + ] + ) + + pkg1_new_version = Version.parse("1.0.1", tag_format=pkg1_tag_format_str) + + all_commit_types: list[CommitConvention] = ["conventional", "emoji", "scipy"] + fix_branch_pr_number = next(pr_num_gen) + fix_branch_squash_commit_spec: CommitSpec = { + "cid": "mr1-pkg1-fix", + **{ # type: ignore[typeddict-item] + cmt_type: format_squash_commit_msg_github( + # Use the primary commit message as the PR title + pr_title=pkg1_fix_branch_commits[0][cmt_type], + pr_number=fix_branch_pr_number, + squashed_commits=[ + cmt[commit_type] for cmt in pkg1_fix_branch_commits[1:] + ], + ) + for cmt_type in all_commit_types + }, + "datetime": next(commit_timestamp_gen), + "include_in_changelog": True, + } + + repo_construction_steps.extend( + [ + { + "action": RepoActionStep.GIT_CHECKOUT, + "details": {"branch": DEFAULT_BRANCH_NAME}, + }, + { + "action": RepoActionStep.GIT_SQUASH, + "details": { + "branch": 
mr1_pkg1_fix_branch_name, + "strategy_option": "theirs", + "commit_def": convert_commit_spec_to_commit_def( + fix_branch_squash_commit_spec, + commit_type, + parser=cast( + "CommitParser[ParseResult, ParserOptions]", + pkg1_commit_parser, + ), + monorepo=True, + ), + "config_file": monorepo_pkg1_pyproject_toml_file, + }, + }, + { + "action": RepoActionStep.RELEASE, + "details": { + "version": str(pkg1_new_version), + "datetime": next(commit_timestamp_gen), + "tag_format": pkg1_tag_format_str, + "version_py_file": monorepo_pkg1_version_py_file.relative_to( + monorepo_pkg1_dir + ), + "commit_message_format": pkg1_cmt_msg_format, + "pre_actions": [ + { + "action": RepoActionStep.WRITE_CHANGELOGS, + "details": { + "new_version": pkg1_new_version, + "dest_files": pkg1_changelog_file_definitions, + "commit_ids": [ + f'{fix_branch_squash_commit_spec["cid"]}-{index + 1}' + for index in range(len(pkg1_fix_branch_commits)) + ], + }, + }, + change_to_pkg1_dir, + ], + "post_actions": [change_to_example_project_dir], + }, + }, + ] + ) + + feat_branch_pr_number = next(pr_num_gen) + feat_branch_squash_commit_spec: CommitSpec = { + "cid": "mr2-pkg2-feat", + **{ # type: ignore[typeddict-item] + cmt_type: format_squash_commit_msg_github( + # Use the primary commit message as the PR title + pr_title=pkg2_feat_branch_commits[0][cmt_type], + pr_number=feat_branch_pr_number, + squashed_commits=[ + cmt[commit_type] for cmt in pkg2_feat_branch_commits[1:] + ], + ) + for cmt_type in all_commit_types + }, + "datetime": next(commit_timestamp_gen), + "include_in_changelog": True, + } + + pkg2_new_version = Version.parse("1.1.0", tag_format=pkg2_tag_format_str) + + repo_construction_steps.extend( + [ + { + "action": RepoActionStep.GIT_SQUASH, + "details": { + "branch": mr2_pkg2_feat_branch_name, + "strategy_option": "theirs", + "commit_def": convert_commit_spec_to_commit_def( + feat_branch_squash_commit_spec, + commit_type, + parser=cast( + "CommitParser[ParseResult, ParserOptions]", + 
pkg2_commit_parser, + ), + monorepo=True, + ), + "config_file": monorepo_pkg2_pyproject_toml_file, + }, + }, + { + "action": RepoActionStep.RELEASE, + "details": { + "version": str(pkg2_new_version), + "datetime": next(commit_timestamp_gen), + "tag_format": pkg2_tag_format_str, + "version_py_file": monorepo_pkg2_version_py_file.relative_to( + monorepo_pkg2_dir + ), + "commit_message_format": pkg2_cmt_msg_format, + "pre_actions": [ + { + "action": RepoActionStep.WRITE_CHANGELOGS, + "details": { + "new_version": pkg2_new_version, + "dest_files": pkg2_changelog_file_definitions, + "commit_ids": [ + f'{feat_branch_squash_commit_spec["cid"]}-{index + 1}' + for index in range( + len(pkg2_feat_branch_commits) + ) + ], + }, + }, + change_to_pkg2_dir, + ], + "post_actions": [change_to_example_project_dir], + }, + }, + ] + ) + + pkg2_fix_branch_commits: Sequence[CommitSpec] = [ + { + "cid": "pkg2-fix-1-squashed", + "conventional": "fix(pkg2-cli): file modified outside of pkg 2, identified by scope\n\nResolves: #123\n", + "emoji": ":bug: (pkg2-cli) file modified outside of pkg 2, identified by scope\n\nResolves: #123\n", + "scipy": "MAINT:pkg2-cli: file modified outside of pkg 2, identified by scope\n\nResolves: #123\n", + "datetime": next(commit_timestamp_gen), + }, + ] + + repo_construction_steps.extend( + [ + { + "action": RepoActionStep.GIT_CHECKOUT, + "details": { + "create_branch": { + "name": mr3_pkg2_fix_branch_name, + "start_branch": DEFAULT_BRANCH_NAME, + } + }, + }, + { + "action": RepoActionStep.MAKE_COMMITS, + "details": { + "commits": convert_commit_specs_to_commit_defs( + [ + { + **commit, + "include_in_changelog": False, + } + for commit in pkg2_fix_branch_commits + ], + commit_type, + parser=cast( + "CommitParser[ParseResult, ParserOptions]", + pkg2_commit_parser, + ), + monorepo=True, + ), + }, + }, + ] + ) + + pkg2_new_version = Version.parse("1.1.1", tag_format=pkg2_tag_format_str) + + fix_branch_pr_number = next(pr_num_gen) + 
fix_branch_squash_commit_spec: CommitSpec = { + "cid": "mr3-pkg2-fix", + **{ # type: ignore[typeddict-item] + cmt_type: format_squash_commit_msg_github( + # Use the primary commit message as the PR title + pr_title=pkg2_fix_branch_commits[0][cmt_type], + pr_number=fix_branch_pr_number, + squashed_commits=[ + cmt[commit_type] for cmt in pkg2_fix_branch_commits[1:] + ], + ) + for cmt_type in all_commit_types + }, + "datetime": next(commit_timestamp_gen), + "include_in_changelog": True, + } + + repo_construction_steps.extend( + [ + { + "action": RepoActionStep.GIT_CHECKOUT, + "details": {"branch": DEFAULT_BRANCH_NAME}, + }, + { + "action": RepoActionStep.GIT_SQUASH, + "details": { + "branch": mr3_pkg2_fix_branch_name, + "strategy_option": "theirs", + "commit_def": convert_commit_spec_to_commit_def( + fix_branch_squash_commit_spec, + commit_type, + parser=cast( + "CommitParser[ParseResult, ParserOptions]", + pkg2_commit_parser, + ), + monorepo=True, + ), + "config_file": monorepo_pkg2_pyproject_toml_file, + }, + }, + { + "action": RepoActionStep.RELEASE, + "details": { + "version": str(pkg2_new_version), + "datetime": next(commit_timestamp_gen), + "tag_format": pkg2_tag_format_str, + "version_py_file": monorepo_pkg2_version_py_file.relative_to( + monorepo_pkg2_dir + ), + "commit_message_format": pkg2_cmt_msg_format, + "pre_actions": [ + { + "action": RepoActionStep.WRITE_CHANGELOGS, + "details": { + "new_version": pkg2_new_version, + "dest_files": pkg2_changelog_file_definitions, + "commit_ids": [ + f'{fix_branch_squash_commit_spec["cid"]}-{index + 1}' + for index in range(len(pkg2_fix_branch_commits)) + ], + }, + }, + change_to_pkg2_dir, + ], + "post_actions": [change_to_example_project_dir], + }, + }, + ] + ) + + pkg1_feat_branch_commits: Sequence[CommitSpec] = [ + { + "cid": "pkg1-feat-1-squashed", + "conventional": "feat(pkg1): file modified outside of pkg 1, identified by scope", + "emoji": ":sparkles: (pkg1) file modified outside of pkg 1, identified by scope", + 
"scipy": "ENH:pkg1: file modified outside of pkg 1, identified by scope", + "datetime": next(commit_timestamp_gen), + } + ] + + repo_construction_steps.extend( + [ + { + "action": RepoActionStep.GIT_CHECKOUT, + "details": { + "create_branch": { + "name": mr4_pkg1_feat_branch_name, + "start_branch": DEFAULT_BRANCH_NAME, + }, + }, + }, + { + "action": RepoActionStep.MAKE_COMMITS, + "details": { + "commits": convert_commit_specs_to_commit_defs( + [ + { + **commit, + "include_in_changelog": False, + } + for commit in pkg1_feat_branch_commits + ], + commit_type, + parser=cast( + "CommitParser[ParseResult, ParserOptions]", + pkg1_commit_parser, + ), + monorepo=True, + ), + }, + }, + ] + ) + + feat_branch_pr_number = next(pr_num_gen) + feat_branch_squash_commit_spec: CommitSpec = { + "cid": "mr4-pkg1-feat", + **{ # type: ignore[typeddict-item] + cmt_type: format_squash_commit_msg_github( + # Use the primary commit message as the PR title + pr_title=pkg1_feat_branch_commits[0][cmt_type], + pr_number=feat_branch_pr_number, + squashed_commits=[ + cmt[commit_type] for cmt in pkg1_feat_branch_commits[1:] + ], + ) + for cmt_type in all_commit_types + }, + "datetime": next(commit_timestamp_gen), + "include_in_changelog": True, + } + + pkg1_new_version = Version.parse("1.1.0", tag_format=pkg1_tag_format_str) + + repo_construction_steps.extend( + [ + { + "action": RepoActionStep.GIT_CHECKOUT, + "details": {"branch": DEFAULT_BRANCH_NAME}, + }, + { + "action": RepoActionStep.GIT_SQUASH, + "details": { + "branch": mr4_pkg1_feat_branch_name, + "strategy_option": "theirs", + "commit_def": convert_commit_spec_to_commit_def( + feat_branch_squash_commit_spec, + commit_type, + parser=cast( + "CommitParser[ParseResult, ParserOptions]", + pkg1_commit_parser, + ), + monorepo=True, + ), + "config_file": monorepo_pkg1_pyproject_toml_file, + }, + }, + { + "action": RepoActionStep.RELEASE, + "details": { + "version": str(pkg1_new_version), + "datetime": next(commit_timestamp_gen), + "tag_format": 
pkg1_tag_format_str, + "version_py_file": monorepo_pkg1_version_py_file.relative_to( + monorepo_pkg1_dir + ), + "commit_message_format": pkg1_cmt_msg_format, + "pre_actions": [ + { + "action": RepoActionStep.WRITE_CHANGELOGS, + "details": { + "new_version": pkg1_new_version, + "dest_files": pkg1_changelog_file_definitions, + "commit_ids": [ + f'{feat_branch_squash_commit_spec["cid"]}-{index + 1}' + for index in range( + len(pkg1_feat_branch_commits) + ) + ], + }, + }, + change_to_pkg1_dir, + ], + "post_actions": [change_to_example_project_dir], + }, + }, + ] + ) + + return repo_construction_steps + + return _get_repo_from_definition + + +@pytest.fixture(scope="session") +def build_monorepo_w_github_flow_w_default_release_channel( + build_repo_from_definition: BuildRepoFromDefinitionFn, + get_repo_definition_4_github_flow_monorepo_w_default_release_channel: GetRepoDefinitionFn, + get_cached_repo_data: GetCachedRepoDataFn, + build_repo_or_copy_cache: BuildRepoOrCopyCacheFn, + build_spec_hash_4_github_flow_monorepo_w_default_release_channel: str, +) -> BuildSpecificRepoFn: + def _build_specific_repo_type( + repo_name: str, commit_type: CommitConvention, dest_dir: Path + ) -> Sequence[RepoActions]: + def _build_repo(cached_repo_path: Path) -> Sequence[RepoActions]: + repo_construction_steps = ( + get_repo_definition_4_github_flow_monorepo_w_default_release_channel( + commit_type=commit_type, + ) + ) + return build_repo_from_definition(cached_repo_path, repo_construction_steps) + + build_repo_or_copy_cache( + repo_name=repo_name, + build_spec_hash=build_spec_hash_4_github_flow_monorepo_w_default_release_channel, + build_repo_func=_build_repo, + dest_dir=dest_dir, + ) + + if not (cached_repo_data := get_cached_repo_data(proj_dirname=repo_name)): + raise ValueError("Failed to retrieve repo data from cache") + + return cached_repo_data["build_definition"] + + return _build_specific_repo_type + + +# --------------------------------------------------------------------------- 
# +# Test-level fixtures that will cache the built directory & set up test case # +# --------------------------------------------------------------------------- # + + +@pytest.fixture +def monorepo_w_github_flow_w_default_release_channel_conventional_commits( + build_monorepo_w_github_flow_w_default_release_channel: BuildSpecificRepoFn, + example_project_git_repo: ExProjectGitRepoFn, + example_project_dir: ExProjectDir, + change_to_ex_proj_dir: None, +) -> BuiltRepoResult: + repo_name = ( + monorepo_w_github_flow_w_default_release_channel_conventional_commits.__name__ + ) + commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] + + return { + "definition": build_monorepo_w_github_flow_w_default_release_channel( + repo_name=repo_name, + commit_type=commit_type, + dest_dir=example_project_dir, + ), + "repo": example_project_git_repo(), + } diff --git a/tests/util.py b/tests/util.py index 9c884c50b..2a5da5676 100644 --- a/tests/util.py +++ b/tests/util.py @@ -152,7 +152,13 @@ def shortuid(length: int = 8) -> str: def add_text_to_file(repo: Repo, filename: str, text: str | None = None): """Makes a deterministic file change for testing""" - tgt_file = Path(repo.working_tree_dir or ".") / filename + tgt_file = Path(filename).resolve().absolute() + + if not tgt_file.is_relative_to(Path(repo.working_dir).resolve().absolute()): + raise ValueError( + f"File {tgt_file} is not relative to the repository working directory {repo.working_dir}" + ) + tgt_file.parent.mkdir(parents=True, exist_ok=True) file_contents = tgt_file.read_text() if tgt_file.exists() else "" line_number = len(file_contents.splitlines()) @@ -160,7 +166,7 @@ def add_text_to_file(repo: Repo, filename: str, text: str | None = None): file_contents += f"{line_number} {text or 'default text'}{os.linesep}" tgt_file.write_text(file_contents, encoding="utf-8") - repo.index.add(filename) + repo.index.add(tgt_file) def flatten_dircmp(dcmp: filecmp.dircmp) -> list[str]: pFad - Phonifier 
reborn

Pfad - The Proxy pFad of © 2024 Garber Painting. All rights reserved.

Note: This service is not intended for secure transactions such as banking, social media, email, or purchasing. Use at your own risk. We assume no liability whatsoever for broken pages.


Alternative Proxies:

Alternative Proxy

pFad Proxy

pFad v3 Proxy

pFad v4 Proxy