Compare commits

..

15 Commits

Author SHA1 Message Date
278386c3d5 Bump version to v3.0.3
All checks were successful
continuous-integration/drone/push Build is passing
2024-11-18 15:15:27 -08:00
1bd66e42de Fix broken py37 build
All checks were successful
continuous-integration/drone/push Build is passing
Forgot walrus aren't supported in py37
2024-11-18 15:14:57 -08:00
04fa347d28 Bump version to v3.0.2
Some checks failed
continuous-integration/drone/push Build is failing
continuous-integration/drone/tag Build is failing
2024-11-18 11:40:37 -08:00
b76826a873 Fix extracting named members from tar file 2024-11-18 11:39:49 -08:00
583cd2b0bb Fix broken extract all files flag 2024-11-18 11:39:13 -08:00
f9c462b94a Improve debug logging
This also includes one fix that I discovered while improving the
logging. Even if a git url was provided, release_gitter was looking for a
local package declaration (Cargo.toml) to identify the version.

With this change, the url parsing and the local repo logic are split
allowing for more detailed logging as well as avoiding this potential
bug.
2024-11-18 11:36:40 -08:00
b59e908d84 Bump version to v3.0.1
Some checks reported errors
continuous-integration/drone/push Build was killed
continuous-integration/drone/tag Build is passing
2024-11-11 12:50:55 -08:00
3059b36908 Remove conflicting cli arguments 2024-11-11 12:50:32 -08:00
29df64c07b Build: Try to cache pip between tests
Some checks reported errors
continuous-integration/drone/push Build was killed
2024-11-11 12:47:04 -08:00
c1dd243035 Bump version to v3.0.0 because of possible breaking change
Some checks reported errors
continuous-integration/drone/push Build was killed
continuous-integration/drone/tag Build is passing
2024-11-11 12:30:32 -08:00
0bb2277e26 BREAKING: Add the ability to download and extract to a temp dir
All checks were successful
continuous-integration/drone/push Build is passing
This also will execute any --exec scripts from the dest directory
2024-11-11 12:29:17 -08:00
6fe0869e8b Bump version to v2.5.2
Some checks reported errors
continuous-integration/drone/push Build was killed
continuous-integration/drone/tag Build is passing
2024-11-07 16:01:05 -08:00
16fb7ca849 Forgot one spot 2024-11-07 16:00:48 -08:00
bcf65ce10f Bump version to v2.5.1
Some checks reported errors
continuous-integration/drone/push Build was killed
continuous-integration/drone/tag Build is passing
2024-11-07 15:39:47 -08:00
4eead212cf Allow format values in exec calls 2024-11-07 15:39:12 -08:00
5 changed files with 135 additions and 42 deletions

View File

@ -1,11 +1,12 @@
# Build pipelines # Build pipelines
PYTHON_VERSIONS = [ PYTHON_VERSIONS = [
"3.7",
"3.8",
"3.9", "3.9",
"3.10", "3.10",
"3.11", "3.11",
"3.12", "3.12",
"3.13",
"latest", "latest",
] ]
@ -71,6 +72,9 @@ def test_step(docker_tag, python_cmd="python"):
"{} -V".format(python_cmd), "{} -V".format(python_cmd),
"make clean-all test" "make clean-all test"
], ],
"environment": {
"PIP_CACHE_DIR": ".pip-cache",
},
} }

View File

@ -14,6 +14,7 @@ import toml
from wheel.wheelfile import WheelFile from wheel.wheelfile import WheelFile
import release_gitter as rg import release_gitter as rg
from release_gitter import removeprefix
@dataclass @dataclass
@ -58,7 +59,7 @@ def read_metadata() -> Config:
raise ValueError("Must have configuration in [tool.release-gitter]") raise ValueError("Must have configuration in [tool.release-gitter]")
git_url = pyproject.pop("git-url", None) git_url = pyproject.pop("git-url", None)
remote_info = rg.parse_git_remote(git_url) remote_info = rg.parse_git_url(git_url)
config = Config( config = Config(
name=pyproject.pop("name", remote_info.repo), name=pyproject.pop("name", remote_info.repo),
@ -95,7 +96,7 @@ class _PseudoBuildBackend:
print("Prepare meta", metadata_directory, config_settings) print("Prepare meta", metadata_directory, config_settings)
metadata = read_metadata() metadata = read_metadata()
version = metadata.version.removeprefix("v") if metadata.version else "0.0.0" version = removeprefix(metadata.version, "v") if metadata.version else "0.0.0"
# Returns distinfo dir? # Returns distinfo dir?
dist_info = Path(metadata_directory) / f"{metadata.name}-{version}.dist-info" dist_info = Path(metadata_directory) / f"{metadata.name}-{version}.dist-info"
@ -144,7 +145,7 @@ class _PseudoBuildBackend:
metadata_directory = Path(metadata_directory) metadata_directory = Path(metadata_directory)
metadata = read_metadata() metadata = read_metadata()
version = metadata.version.removeprefix("v") if metadata.version else "0.0.0" version = removeprefix(metadata.version, "v") if metadata.version else "0.0.0"
wheel_directory = Path(wheel_directory) wheel_directory = Path(wheel_directory)
wheel_directory.mkdir(exist_ok=True) wheel_directory.mkdir(exist_ok=True)

View File

@ -19,7 +19,7 @@ authors = [
maintainers = [ maintainers = [
{ name = "Ian Fijolek", email = "iamthefij@gmail.com" } { name = "Ian Fijolek", email = "iamthefij@gmail.com" }
] ]
requires-python = ">=3.9" requires-python = ">=3.7"
dependencies = ["requests"] dependencies = ["requests"]
[project.optional-dependencies] [project.optional-dependencies]
@ -53,7 +53,7 @@ run = [
] ]
[[tool.hatch.envs.test.matrix]] [[tool.hatch.envs.test.matrix]]
python = ["3", "3.9", "3.10", "3.11", "3.12", "3.13"] python = ["3", "3.7", "3.8", "3.9", "3.10", "3.11", "3.12"]
[tool.hatch.envs.lint] [tool.hatch.envs.lint]
detached = true detached = true

View File

@ -2,7 +2,9 @@
from __future__ import annotations from __future__ import annotations
import argparse import argparse
import logging
import platform import platform
import tempfile
from collections.abc import Sequence from collections.abc import Sequence
from dataclasses import dataclass from dataclasses import dataclass
from io import BytesIO from io import BytesIO
@ -20,7 +22,10 @@ from zipfile import ZipFile
import requests import requests
__version__ = "2.5.0" __version__ = "3.0.3"
logging.basicConfig(level=logging.WARNING)
class UnsupportedContentTypeError(ValueError): class UnsupportedContentTypeError(ValueError):
@ -31,6 +36,24 @@ class InvalidRemoteError(ValueError):
pass pass
def removeprefix(s: str, pre: str) -> str:
# Duplicate str.removeprefix for py<3.9
try:
return s.removeprefix(pre) # type: ignore
except AttributeError:
# Py < 3.9
return s[len(pre) :] if s and s.startswith(pre) else s
def removesuffix(s: str, suf: str) -> str:
# Duplicate str.removesuffix for py<3.9
try:
return s.removesuffix(suf) # type: ignore
except AttributeError:
# Py < 3.9
return s[: -len(suf)] if s and s.endswith(suf) else s
SYSTEM_SYNONYMS: list[list[str]] = [ SYSTEM_SYNONYMS: list[list[str]] = [
["Darwin", "darwin", "MacOS", "macos", "macOS"], ["Darwin", "darwin", "MacOS", "macos", "macOS"],
["Windows", "windows", "win", "win32", "win64"], ["Windows", "windows", "win", "win32", "win64"],
@ -103,13 +126,13 @@ class GitRemoteInfo:
) )
def parse_git_remote(git_url: str | None = None) -> GitRemoteInfo: def read_git_remote() -> str:
"""Extract Github repo info from a git remote url""" """Reads the git remote url from the origin"""
if not git_url: return check_output(["git", "remote", "get-url", "origin"]).decode("UTF-8").strip()
git_url = (
check_output(["git", "remote", "get-url", "origin"]).decode("UTF-8").strip()
)
def parse_git_url(git_url: str) -> GitRemoteInfo:
"""Extract Github repo info from a git remote url"""
# Normalize Github ssh url as a proper URL # Normalize Github ssh url as a proper URL
if git_url.startswith("git@github.com:"): if git_url.startswith("git@github.com:"):
git_ssh_parts = git_url.partition(":") git_ssh_parts = git_url.partition(":")
@ -129,7 +152,7 @@ def parse_git_remote(git_url: str | None = None) -> GitRemoteInfo:
f"{path[1:3]} Could not parse owner and repo from URL {git_url}" f"{path[1:3]} Could not parse owner and repo from URL {git_url}"
) )
return GitRemoteInfo(u.hostname, path[1], path[2].removesuffix(".git")) return GitRemoteInfo(u.hostname, path[1], removesuffix(path[2], ".git"))
def parse_cargo_version(p: Path) -> str: def parse_cargo_version(p: Path) -> str:
@ -156,6 +179,7 @@ def read_git_tag(fetch: bool = True) -> str | None:
def read_version(from_tags: bool = False, fetch: bool = False) -> str | None: def read_version(from_tags: bool = False, fetch: bool = False) -> str | None:
"""Read version information from file or from git""" """Read version information from file or from git"""
if from_tags: if from_tags:
logging.debug("Reading version from git tag")
return read_git_tag(fetch) return read_git_tag(fetch)
matchers = { matchers = {
@ -165,10 +189,13 @@ def read_version(from_tags: bool = False, fetch: bool = False) -> str | None:
for name, extractor in matchers.items(): for name, extractor in matchers.items():
p = Path(name) p = Path(name)
if p.exists(): if p.exists():
logging.debug(f"Reading version from {p}")
return extractor(p) return extractor(p)
# TODO: Log this out to stderr logging.warning(
# raise ValueError(f"Unknown project type. Didn't find any of {matchers.keys()}") "Unknown local project version. Didn't find any of %s", set(matchers.keys())
)
return None return None
@ -191,6 +218,8 @@ def fetch_release(
# Return the latest if requested # Return the latest if requested
if version is None or version == "latest": if version is None or version == "latest":
logging.debug("Looking for latest release")
for release in result.json(): for release in result.json():
if release["prerelease"] and not pre_release: if release["prerelease"] and not pre_release:
continue continue
@ -200,6 +229,8 @@ def fetch_release(
# Return matching version # Return matching version
for release in result.json(): for release in result.json():
if release["tag_name"].endswith(version): if release["tag_name"].endswith(version):
logging.debug(f"Found release {release['name']} matching version {version}")
return release return release
raise ValueError( raise ValueError(
@ -250,13 +281,7 @@ def match_asset(
# This should never really happen # This should never really happen
if version is None: if version is None:
if "{version}" in format: raise ValueError("No version provided or found in release name.")
raise ValueError(
"No version provided or found in release name but is in format"
)
else:
# This should never happen, but since version isn't used anywhere, we can make it an empty string
version = ""
system = platform.system() system = platform.system()
if system_mapping: if system_mapping:
@ -304,8 +329,12 @@ class PackageAdapter:
"application/zip", "application/zip",
"application/x-zip-compressed", "application/x-zip-compressed",
): ):
logging.debug("Opening zip file from response content")
self._package = ZipFile(BytesIO(response.content)) self._package = ZipFile(BytesIO(response.content))
elif content_type == "application/x-tar": elif content_type == "application/x-tar":
logging.debug("Opening tar file from response content")
self._package = TarFile(fileobj=response.raw) self._package = TarFile(fileobj=response.raw)
elif content_type in ( elif content_type in (
"application/gzip", "application/gzip",
@ -313,6 +342,8 @@ class PackageAdapter:
"application/x-tar+xz", "application/x-tar+xz",
"application/x-compressed-tar", "application/x-compressed-tar",
): ):
logging.debug("Opening compressed tar file from response content")
self._package = TarFile.open(fileobj=BytesIO(response.content), mode="r:*") self._package = TarFile.open(fileobj=BytesIO(response.content), mode="r:*")
else: else:
raise UnsupportedContentTypeError( raise UnsupportedContentTypeError(
@ -323,6 +354,7 @@ class PackageAdapter:
"""Get list of all file names in package""" """Get list of all file names in package"""
if isinstance(self._package, ZipFile): if isinstance(self._package, ZipFile):
return self._package.namelist() return self._package.namelist()
if isinstance(self._package, TarFile): if isinstance(self._package, TarFile):
return self._package.getnames() return self._package.getnames()
@ -340,19 +372,26 @@ class PackageAdapter:
If the `file_names` list is empty, all files will be extracted""" If the `file_names` list is empty, all files will be extracted"""
if path is None: if path is None:
path = Path.cwd() path = Path.cwd()
if not members: if not members:
logging.debug("Extracting all members to %s", path)
self._package.extractall(path=path) self._package.extractall(path=path)
return self.get_names() return self.get_names()
# TODO: Use walrus operator when dropping 3.7 support
missing_members = set(members) - set(self.get_names()) missing_members = set(members) - set(self.get_names())
if missing_members: if missing_members:
raise ValueError(f"Missing members: {missing_members}") raise ValueError(f"Missing members: {missing_members}")
logging.debug("Extracting members %s to %s", members, path)
if isinstance(self._package, ZipFile): if isinstance(self._package, ZipFile):
self._package.extractall(path=path, members=members) self._package.extractall(path=path, members=members)
if isinstance(self._package, TarFile): if isinstance(self._package, TarFile):
self._package.extractall( self._package.extractall(
path=path, members=(TarInfo(name) for name in members) path=path, members=(self._package.getmember(name) for name in members)
) )
return members return members
@ -375,7 +414,7 @@ def get_asset_package(
continue continue
else: else:
raise UnsupportedContentTypeError( raise UnsupportedContentTypeError(
"Cannot extract files from archive because we don't recognize the content type" f"Cannot extract files from archive because we don't recognize the content types {possible_content_types}"
) )
@ -402,8 +441,10 @@ def download_asset(
result = requests.get(asset["browser_download_url"]) result = requests.get(asset["browser_download_url"])
if extract_files is not None: if extract_files is not None:
logging.info("Extracting package %s", asset["name"])
package = get_asset_package(asset, result) package = get_asset_package(asset, result)
extract_files = package.extractall(path=destination, members=extract_files) extract_files = package.extractall(path=destination, members=extract_files)
return [destination / name for name in extract_files] return [destination / name for name in extract_files]
file_name = destination / asset["name"] file_name = destination / asset["name"]
@ -450,6 +491,7 @@ class MapAddAction(argparse.Action):
def _parse_args(args: list[str] | None = None) -> argparse.Namespace: def _parse_args(args: list[str] | None = None) -> argparse.Namespace:
logging.debug("Parsing arguments: %s", args)
parser = argparse.ArgumentParser() parser = argparse.ArgumentParser()
parser.add_argument( parser.add_argument(
"format", "format",
@ -463,7 +505,9 @@ def _parse_args(args: list[str] | None = None) -> argparse.Namespace:
default=Path.cwd(), default=Path.cwd(),
help="Destination directory. Defaults to current directory", help="Destination directory. Defaults to current directory",
) )
parser.add_argument("-v", action="store_true", help="verbose logging") parser.add_argument(
"-v", action="count", help="verbose or debug logging", default=0
)
parser.add_argument( parser.add_argument(
"--hostname", "--hostname",
help="Git repository hostname", help="Git repository hostname",
@ -534,12 +578,29 @@ def _parse_args(args: list[str] | None = None) -> argparse.Namespace:
action="store_true", action="store_true",
help="Only print the URL and do not download", help="Only print the URL and do not download",
) )
parser.add_argument(
"--use-temp-dir",
action="store_true",
help="Use a temporary directory as the destination",
)
parsed_args = parser.parse_args(args) parsed_args = parser.parse_args(args)
# Merge in fields from args and git remote # Merge in fields from args and git remote
if not all((parsed_args.owner, parsed_args.repo, parsed_args.hostname)): if not all((parsed_args.owner, parsed_args.repo, parsed_args.hostname)):
remote_info = parse_git_remote(parsed_args.git_url) # Check to see if a git url was provided. If not, we use local directory git remote
if parsed_args.git_url is None:
parsed_args.git_url = read_git_remote()
# If using a local repo, try to determine version from project files
if parsed_args.version is None:
parsed_args.version = read_version(
parsed_args.version_git_tag,
not parsed_args.version_git_no_fetch,
)
# Get parts from git url
remote_info = parse_git_url(parsed_args.git_url)
def merge_field(a, b, field): def merge_field(a, b, field):
value = getattr(a, field) value = getattr(a, field)
@ -549,15 +610,12 @@ def _parse_args(args: list[str] | None = None) -> argparse.Namespace:
for field in ("owner", "repo", "hostname"): for field in ("owner", "repo", "hostname"):
merge_field(parsed_args, remote_info, field) merge_field(parsed_args, remote_info, field)
if parsed_args.version is None:
parsed_args.version = read_version(
parsed_args.version_git_tag,
not parsed_args.version_git_no_fetch,
)
if parsed_args.extract_all: if parsed_args.extract_all:
parsed_args.extract_files = [] parsed_args.extract_files = []
if parsed_args.use_temp_dir:
parsed_args.destination = Path(tempfile.mkdtemp())
return parsed_args return parsed_args
@ -590,9 +648,14 @@ def download_release(
arch_mapping=arch_mapping, arch_mapping=arch_mapping,
) )
format_fields = dict(
asset_name=asset["name"],
**matched_values._asdict(),
)
formatted_files = ( formatted_files = (
[file.format(**matched_values._asdict()) for file in extract_files] [file.format(**format_fields) for file in extract_files]
if extract_files if extract_files is not None
else None else None
) )
@ -603,7 +666,9 @@ def download_release(
) )
if exec: if exec:
check_call(exec.format(asset["name"]), shell=True, cwd=destination) check_call(
exec.format(asset["name"], **format_fields), shell=True, cwd=destination
)
return files return files
@ -611,6 +676,8 @@ def download_release(
def main(): def main():
args = _parse_args() args = _parse_args()
logging.getLogger().setLevel(30 - 10 * args.v)
# Fetch the release # Fetch the release
release = fetch_release( release = fetch_release(
GitRemoteInfo(args.hostname, args.owner, args.repo), GitRemoteInfo(args.hostname, args.owner, args.repo),
@ -618,8 +685,12 @@ def main():
pre_release=args.prerelease, pre_release=args.prerelease,
) )
logging.debug("Found release: %s", release["name"])
version = args.version or release["tag_name"] version = args.version or release["tag_name"]
logging.debug("Release version: %s", version)
# Find the asset to download using mapping rules # Find the asset to download using mapping rules
asset, matched_values = match_asset( asset, matched_values = match_asset(
release, release,
@ -629,17 +700,21 @@ def main():
arch_mapping=args.map_arch, arch_mapping=args.map_arch,
) )
if args.v: logging.info(f"Downloading {asset['name']} from release {release['name']}")
print(f"Downloading {asset['name']} from release {release['name']}")
if args.url_only: if args.url_only:
print(asset["browser_download_url"]) print(asset["browser_download_url"])
return return
format_fields = dict(
asset_name=asset["name"],
**matched_values._asdict(),
)
# Format files to extract with version info, as this is sometimes included # Format files to extract with version info, as this is sometimes included
formatted_files = ( formatted_files = (
[file.format(**matched_values._asdict()) for file in args.extract_files] [file.format(**format_fields) for file in args.extract_files]
if args.extract_files if args.extract_files is not None
else None else None
) )
@ -653,7 +728,11 @@ def main():
# Optionally execute post command # Optionally execute post command
if args.exec: if args.exec:
check_call(args.exec.format(asset["name"]), shell=True) check_call(
args.exec.format(asset["name"], **format_fields),
shell=True,
cwd=args.destination,
)
if __name__ == "__main__": if __name__ == "__main__":

View File

@ -41,6 +41,15 @@ class TestExpression(NamedTuple):
raise raise
class TestGeneral(unittest.TestCase):
def test_removesuffix(self):
for test_case in (
TestExpression(self, ["repo.git", ".git"], {}, "repo"),
TestExpression(self, ["repo", ".git"], {}, "repo"),
):
test_case.run(release_gitter.removesuffix)
class TestRemoteInfo(unittest.TestCase): class TestRemoteInfo(unittest.TestCase):
def test_parse_remote_info(self): def test_parse_remote_info(self):
for test_case in ( for test_case in (
@ -73,7 +82,7 @@ class TestRemoteInfo(unittest.TestCase):
release_gitter.InvalidRemoteError, release_gitter.InvalidRemoteError,
), ),
): ):
test_case.run(release_gitter.parse_git_remote) test_case.run(release_gitter.parse_git_url)
def test_generate_release_url(self): def test_generate_release_url(self):
for subtest in ( for subtest in (